1 /* Mudflap: narrow-pointer bounds-checking by tree rewriting.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Frank Ch. Eigler <fche@redhat.com>
5 and Graydon Hoare <graydon@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "tm_p.h"
30 #include "basic-block.h"
31 #include "flags.h"
32 #include "function.h"
33 #include "tree-inline.h"
34 #include "gimple.h"
35 #include "tree-iterator.h"
36 #include "tree-flow.h"
37 #include "tree-mudflap.h"
38 #include "tree-dump.h"
39 #include "tree-pass.h"
40 #include "hashtab.h"
41 #include "diagnostic.h"
42 #include "demangle.h"
43 #include "langhooks.h"
44 #include "ggc.h"
45 #include "cgraph.h"
46 #include "gimple.h"
48 /* Internal function decls */
51 /* Options. */
52 #define flag_mudflap_threads (flag_mudflap == 2)
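/* flag_mudflap is 1 for plain -fmudflap and 2 for -fmudflapth, the
   thread-aware variant (per the option definitions in common.opt), so
   this macro selects the code paths that must remain safe when other
   threads may be running.  */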
54 /* Helpers. */
55 static tree mf_build_string (const char *string);
56 static tree mf_varname_tree (tree);
57 static tree mf_file_function_line_tree (location_t);
59 /* Indirection-related instrumentation. */
60 static void mf_decl_cache_locals (void);
61 static void mf_decl_clear_locals (void);
62 static void mf_xform_statements (void);
63 static unsigned int execute_mudflap_function_ops (void);
65 /* Addressable variables instrumentation. */
66 static void mf_xform_decls (gimple_seq, tree);
67 static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
68 struct walk_stmt_info *);
69 static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
70 static unsigned int execute_mudflap_function_decls (void);
73 /* ------------------------------------------------------------------------ */
74 /* Some generally helpful functions for mudflap instrumentation. */
76 /* Build a reference to a literal string. */
77 static tree
78 mf_build_string (const char *string)
80 size_t len = strlen (string);
81 tree result = mf_mark (build_string (len + 1, string));
83 TREE_TYPE (result) = build_array_type
84 (char_type_node, build_index_type (size_int (len)));
85 TREE_CONSTANT (result) = 1;
86 TREE_READONLY (result) = 1;
87 TREE_STATIC (result) = 1;
89 result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);
91 return mf_mark (result);
94 /* Create a properly typed STRING_CST node that describes the given
95 declaration. It will be used as an argument for __mf_register().
96 Try to construct a helpful string, including file/function/variable
97 name. */
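/* For a declaration like "int buf[10];" at foo.c:12:7 inside main (),
   the string assembled below comes out roughly as

       "foo.c:12:7 (main) buf"

   i.e. file[:line[:column]], the enclosing function in parentheses,
   then the (possibly demangled) declaration name.  Rough sketch only;
   the exact text depends on the available location information and on
   the language hooks.  */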
99 static tree
100 mf_varname_tree (tree decl)
102 static pretty_printer buf_rec;
103 static int initialized = 0;
104 pretty_printer *buf = & buf_rec;
105 const char *buf_contents;
106 tree result;
108 gcc_assert (decl);
110 if (!initialized)
112 pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
113 initialized = 1;
115 pp_clear_output_area (buf);
117 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
119 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
120 const char *sourcefile;
121 unsigned sourceline = xloc.line;
122 unsigned sourcecolumn = 0;
123 sourcecolumn = xloc.column;
124 sourcefile = xloc.file;
125 if (sourcefile == NULL && current_function_decl != NULL_TREE)
126 sourcefile = DECL_SOURCE_FILE (current_function_decl);
127 if (sourcefile == NULL)
128 sourcefile = "<unknown file>";
130 pp_string (buf, sourcefile);
132 if (sourceline != 0)
134 pp_string (buf, ":");
135 pp_decimal_int (buf, sourceline);
137 if (sourcecolumn != 0)
139 pp_string (buf, ":");
140 pp_decimal_int (buf, sourcecolumn);
145 if (current_function_decl != NULL_TREE)
147 /* Add (FUNCTION) */
148 pp_string (buf, " (");
150 const char *funcname = NULL;
151 if (DECL_NAME (current_function_decl))
152 funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
153 if (funcname == NULL)
154 funcname = "anonymous fn";
156 pp_string (buf, funcname);
158 pp_string (buf, ") ");
160 else
161 pp_string (buf, " ");
163 /* Add <variable-declaration>, possibly demangled. */
165 const char *declname = NULL;
167 if (DECL_NAME (decl) != NULL)
169 if (strcmp ("GNU C++", lang_hooks.name) == 0)
171 /* The gcc/cp decl_printable_name hook doesn't do as good a job as
172 the libiberty demangler. */
173 declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
174 DMGL_AUTO | DMGL_VERBOSE);
176 if (declname == NULL)
177 declname = lang_hooks.decl_printable_name (decl, 3);
179 if (declname == NULL)
180 declname = "<unnamed variable>";
182 pp_string (buf, declname);
185 /* Return the lot as a new STRING_CST. */
186 buf_contents = pp_base_formatted_text (buf);
187 result = mf_build_string (buf_contents);
188 pp_clear_output_area (buf);
190 return result;
194 /* And another friend, for producing a simpler message. */
196 static tree
197 mf_file_function_line_tree (location_t location)
199 expanded_location xloc = expand_location (location);
200 const char *file = NULL, *colon, *line, *op, *name, *cp;
201 char linecolbuf[30]; /* Enough for two decimal numbers plus a colon. */
202 char *string;
203 tree result;
205 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
206 file = xloc.file;
207 if (file == NULL && current_function_decl != NULL_TREE)
208 file = DECL_SOURCE_FILE (current_function_decl);
209 if (file == NULL)
210 file = "<unknown file>";
212 if (xloc.line > 0)
214 if (xloc.column > 0)
215 sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
216 else
217 sprintf (linecolbuf, "%d", xloc.line);
218 colon = ":";
219 line = linecolbuf;
221 else
222 colon = line = "";
224 /* Add (FUNCTION). */
225 name = lang_hooks.decl_printable_name (current_function_decl, 1);
226 if (name)
228 op = " (";
229 cp = ")";
231 else
232 op = name = cp = "";
234 string = concat (file, colon, line, op, name, cp, NULL);
235 result = mf_build_string (string);
236 free (string);
238 return result;
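/* Rough sketch of the string built by mf_file_function_line_tree
   above: for a location of foo.c:27:5 inside main () it produces
   something like "foo.c:27:5 (main)"; with no usable location it
   degenerates to just the file name.  */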
242 /* global tree nodes */
244 /* Global tree objects for global variables and functions exported by
245 mudflap runtime library. mudflap_init must be called
246 before using these. */
248 /* uintptr_t (usually "unsigned long") */
249 static GTY (()) tree mf_uintptr_type;
251 /* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
252 static GTY (()) tree mf_cache_struct_type;
254 /* struct __mf_cache * const */
255 static GTY (()) tree mf_cache_structptr_type;
257 /* extern struct __mf_cache __mf_lookup_cache []; */
258 static GTY (()) tree mf_cache_array_decl;
260 /* extern unsigned char __mf_lc_shift; */
261 static GTY (()) tree mf_cache_shift_decl;
263 /* extern uintptr_t __mf_lc_mask; */
264 static GTY (()) tree mf_cache_mask_decl;
266 /* Their function-scope local shadows, used in single-threaded mode only. */
268 /* auto const unsigned char __mf_lc_shift_l; */
269 static GTY (()) tree mf_cache_shift_decl_l;
271 /* auto const uintptr_t __mf_lc_mask_l; */
272 static GTY (()) tree mf_cache_mask_decl_l;
274 /* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
275 static GTY (()) tree mf_check_fndecl;
277 /* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
278 static GTY (()) tree mf_register_fndecl;
280 /* extern void __mf_unregister (void *ptr, size_t sz, int type); */
281 static GTY (()) tree mf_unregister_fndecl;
283 /* extern void __mf_init (); */
284 static GTY (()) tree mf_init_fndecl;
286 /* extern int __mf_set_options (const char*); */
287 static GTY (()) tree mf_set_options_fndecl;
290 /* Helper for mudflap_init: construct a decl with the given category,
291 name, and type, mark it an external reference, and pushdecl it. */
292 static inline tree
293 mf_make_builtin (enum tree_code category, const char *name, tree type)
295 tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
296 category, get_identifier (name), type));
297 TREE_PUBLIC (decl) = 1;
298 DECL_EXTERNAL (decl) = 1;
299 lang_hooks.decls.pushdecl (decl);
300 /* The decl was declared by the compiler. */
301 DECL_ARTIFICIAL (decl) = 1;
302 /* And we don't want debug info for it. */
303 DECL_IGNORED_P (decl) = 1;
304 return decl;
307 /* Helper for mudflap_init: construct a tree corresponding to the type
308 struct __mf_cache { uintptr_t low; uintptr_t high; };
309 where uintptr_t is the FIELD_TYPE argument. */
310 static inline tree
311 mf_make_mf_cache_struct_type (tree field_type)
313 /* There is, abominably, no language-independent way to construct a
314 RECORD_TYPE. So we have to call the basic type construction
315 primitives by hand. */
316 tree fieldlo = build_decl (UNKNOWN_LOCATION,
317 FIELD_DECL, get_identifier ("low"), field_type);
318 tree fieldhi = build_decl (UNKNOWN_LOCATION,
319 FIELD_DECL, get_identifier ("high"), field_type);
321 tree struct_type = make_node (RECORD_TYPE);
322 DECL_CONTEXT (fieldlo) = struct_type;
323 DECL_CONTEXT (fieldhi) = struct_type;
324 DECL_CHAIN (fieldlo) = fieldhi;
325 TYPE_FIELDS (struct_type) = fieldlo;
326 TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
327 layout_type (struct_type);
329 return struct_type;
332 /* Initialize the global tree nodes that correspond to mf-runtime.h
333 declarations. */
334 void
335 mudflap_init (void)
337 static bool done = false;
338 tree mf_const_string_type;
339 tree mf_cache_array_type;
340 tree mf_check_register_fntype;
341 tree mf_unregister_fntype;
342 tree mf_init_fntype;
343 tree mf_set_options_fntype;
345 if (done)
346 return;
347 done = true;
349 mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
350 /*unsignedp=*/true);
351 mf_const_string_type
352 = build_pointer_type (build_qualified_type
353 (char_type_node, TYPE_QUAL_CONST));
355 mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
356 mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
357 mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
358 mf_check_register_fntype =
359 build_function_type_list (void_type_node, ptr_type_node, size_type_node,
360 integer_type_node, mf_const_string_type, NULL_TREE);
361 mf_unregister_fntype =
362 build_function_type_list (void_type_node, ptr_type_node, size_type_node,
363 integer_type_node, NULL_TREE);
364 mf_init_fntype =
365 build_function_type_list (void_type_node, NULL_TREE);
366 mf_set_options_fntype =
367 build_function_type_list (integer_type_node, mf_const_string_type, NULL_TREE);
369 mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
370 mf_cache_array_type);
371 mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
372 unsigned_char_type_node);
373 mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
374 mf_uintptr_type);
375 /* Don't process these in mudflap_enqueue_decl, should they come by
376 there for some reason. */
377 mf_mark (mf_cache_array_decl);
378 mf_mark (mf_cache_shift_decl);
379 mf_mark (mf_cache_mask_decl);
380 mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
381 mf_check_register_fntype);
382 mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
383 mf_check_register_fntype);
384 mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
385 mf_unregister_fntype);
386 mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
387 mf_init_fntype);
388 mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
389 mf_set_options_fntype);
393 /* ------------------------------------------------------------------------ */
394 /* This is the second part of the mudflap instrumentation. It works on
395 low-level GIMPLE using the CFG, because we want to run this pass after
396 tree optimizations have been performed, but we have to preserve the CFG
397 for expansion from trees to RTL.
398 Below is the list of transformations performed on statements in the
399 current function.
401 1) Memory reference transforms: Perform the mudflap indirection-related
402 tree transforms on memory references.
404 2) Mark BUILTIN_ALLOCA calls not inlineable. */
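/* As a rough illustration of transform (1): a store such as

       *p = v;

   is preceded by an inline probe of the lookup cache and, on a miss,
   a call into the runtime, conceptually

       __mf_base  = (uintptr_t) p;
       __mf_limit = __mf_base + sizeof (*p) - 1;
       __mf_elem  = &__mf_lookup_cache[(__mf_base >> shift) & mask];
       if (__mf_elem->low > __mf_base || __mf_elem->high < __mf_limit)
         __mf_check ((void *) __mf_base, sizeof (*p), 1, "foo.c:12 (fn)");
       *p = v;

   where the third __mf_check argument is the dirflag used in this
   file (1 for writes, 0 for reads).  This is only a sketch of the
   GIMPLE emitted by mf_build_check_statement_for below.  */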
408 static unsigned int
409 execute_mudflap_function_ops (void)
411 struct gimplify_ctx gctx;
413 /* Don't instrument functions such as the synthetic constructor
414 built during mudflap_finish_file. */
415 if (mf_marked_p (current_function_decl) ||
416 DECL_ARTIFICIAL (current_function_decl))
417 return 0;
419 push_gimplify_context (&gctx);
421 add_referenced_var (mf_cache_array_decl);
422 add_referenced_var (mf_cache_shift_decl);
423 add_referenced_var (mf_cache_mask_decl);
425 /* In multithreaded mode, don't cache the lookup cache parameters. */
426 if (! flag_mudflap_threads)
427 mf_decl_cache_locals ();
429 mf_xform_statements ();
431 if (! flag_mudflap_threads)
432 mf_decl_clear_locals ();
434 pop_gimplify_context (NULL);
435 return 0;
438 /* Insert a gimple_seq SEQ on all the outgoing edges of BB. Note that
439 if BB has more than one outgoing edge, SEQ will be replicated for each edge.
440 Also, abnormal edges will be ignored. */
442 static void
443 insert_edge_copies_seq (gimple_seq seq, basic_block bb)
445 edge e;
446 edge_iterator ei;
447 unsigned n_copies = -1;
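/* n_copies is unsigned, so the -1 above is really UINT_MAX; after the
   counting loop below it has wrapped around to one less than the
   number of non-abnormal successor edges.  The insertion loop then
   attaches a copy of SEQ to all but the last such edge and reuses SEQ
   itself on the final one, saving one gimple_seq_copy.  */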
449 FOR_EACH_EDGE (e, ei, bb->succs)
450 if (!(e->flags & EDGE_ABNORMAL))
451 n_copies++;
453 FOR_EACH_EDGE (e, ei, bb->succs)
454 if (!(e->flags & EDGE_ABNORMAL))
455 gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
458 /* Create and initialize local shadow variables for the lookup cache
459 globals. Put their decls in the *_l globals for use by
460 mf_build_check_statement_for. */
462 static void
463 mf_decl_cache_locals (void)
465 gimple g;
466 gimple_seq seq = gimple_seq_alloc ();
468 /* Build the cache vars. */
469 mf_cache_shift_decl_l
470 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl),
471 "__mf_lookup_shift_l"));
473 mf_cache_mask_decl_l
474 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl),
475 "__mf_lookup_mask_l"));
477 /* Build initialization nodes for the cache vars. We just load the
478 globals into the cache variables. */
479 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
480 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
481 gimple_seq_add_stmt (&seq, g);
483 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
484 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
485 gimple_seq_add_stmt (&seq, g);
487 insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
489 gsi_commit_edge_inserts ();
493 static void
494 mf_decl_clear_locals (void)
496 /* Unset local shadows. */
497 mf_cache_shift_decl_l = NULL_TREE;
498 mf_cache_mask_decl_l = NULL_TREE;
501 static void
502 mf_build_check_statement_for (tree base, tree limit,
503 gimple_stmt_iterator *instr_gsi,
504 location_t location, tree dirflag)
506 gimple_stmt_iterator gsi;
507 basic_block cond_bb, then_bb, join_bb;
508 edge e;
509 tree cond, t, u, v;
510 tree mf_base;
511 tree mf_elem;
512 tree mf_limit;
513 gimple g;
514 gimple_seq seq, stmts;
516 /* We first need to split the current basic block, and start altering
517 the CFG. This allows us to insert the statements we're about to
518 construct into the right basic blocks. */
520 cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
521 gsi = *instr_gsi;
522 gsi_prev (&gsi);
523 if (! gsi_end_p (gsi))
524 e = split_block (cond_bb, gsi_stmt (gsi));
525 else
526 e = split_block_after_labels (cond_bb);
527 cond_bb = e->src;
528 join_bb = e->dest;
530 /* A recap at this point: join_bb is the basic block at whose head
531 is the gimple statement for which this check expression is being
532 built. cond_bb is the (possibly new, synthetic) basic block the
533 end of which will contain the cache-lookup code, and a
534 conditional that jumps to the cache-miss code or, much more
535 likely, over to join_bb. */
537 /* Create the bb that contains the cache-miss fallback block (mf_check). */
538 then_bb = create_empty_bb (cond_bb);
539 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
540 make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);
542 /* Mark the pseudo-fallthrough edge from cond_bb to join_bb. */
543 e = find_edge (cond_bb, join_bb);
544 e->flags = EDGE_FALSE_VALUE;
545 e->count = cond_bb->count;
546 e->probability = REG_BR_PROB_BASE;
548 /* Update dominance info. Note that join_bb's data was
549 updated by split_block. */
550 if (dom_info_available_p (CDI_DOMINATORS))
552 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
553 set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
556 /* Build our local variables. */
557 mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem");
558 mf_base = make_rename_temp (mf_uintptr_type, "__mf_base");
559 mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit");
561 /* Build: __mf_base = (uintptr_t) <base address expression>. */
562 seq = gimple_seq_alloc ();
563 t = fold_convert_loc (location, mf_uintptr_type,
564 unshare_expr (base));
565 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
566 gimple_seq_add_seq (&seq, stmts);
567 g = gimple_build_assign (mf_base, t);
568 gimple_set_location (g, location);
569 gimple_seq_add_stmt (&seq, g);
571 /* Build: __mf_limit = (uintptr_t) <limit address expression>. */
572 t = fold_convert_loc (location, mf_uintptr_type,
573 unshare_expr (limit));
574 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
575 gimple_seq_add_seq (&seq, stmts);
576 g = gimple_build_assign (mf_limit, t);
577 gimple_set_location (g, location);
578 gimple_seq_add_stmt (&seq, g);
580 /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
581 & __mf_mask]. */
582 t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
583 flag_mudflap_threads ? mf_cache_shift_decl
584 : mf_cache_shift_decl_l);
585 t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
586 flag_mudflap_threads ? mf_cache_mask_decl
587 : mf_cache_mask_decl_l);
588 t = build4 (ARRAY_REF,
589 TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
590 mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
591 t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
592 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
593 gimple_seq_add_seq (&seq, stmts);
594 g = gimple_build_assign (mf_elem, t);
595 gimple_set_location (g, location);
596 gimple_seq_add_stmt (&seq, g);
598 /* Quick validity check.
600 if (__mf_elem->low > __mf_base
601 || (__mf_elem->high < __mf_limit))
603 __mf_check ();
604 ... and only if single-threaded:
605 __mf_lookup_shift_l = ...;
606 __mf_lookup_mask_l = ...;
609 It is expected that this body of code is rarely executed so we mark
610 the edge to the THEN clause of the conditional jump as unlikely. */
612 /* Construct t <-- '__mf_elem->low > __mf_base'. */
613 t = build3 (COMPONENT_REF, mf_uintptr_type,
614 build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
615 TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
616 t = build2 (GT_EXPR, boolean_type_node, t, mf_base);
618 /* Construct '__mf_elem->high < __mf_limit'.
620 First build:
621 1) u <-- '__mf_elem->high'
622 2) v <-- '__mf_limit'.
624 Then build 'u <-- (u < v)'. */
626 u = build3 (COMPONENT_REF, mf_uintptr_type,
627 build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
628 DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);
630 v = mf_limit;
632 u = build2 (LT_EXPR, boolean_type_node, u, v);
634 /* Build the composed conditional: t <-- 't || u'. Then store the
635 result of the evaluation of 't' in a temporary variable which we
636 can use as the condition for the conditional jump. */
637 t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
638 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
639 gimple_seq_add_seq (&seq, stmts);
640 cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond");
641 g = gimple_build_assign (cond, t);
642 gimple_set_location (g, location);
643 gimple_seq_add_stmt (&seq, g);
645 /* Build the conditional jump. 'cond' is just a temporary so we can
646 simply build a void COND_EXPR. We do need labels in both arms though. */
647 g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
648 NULL_TREE);
649 gimple_set_location (g, location);
650 gimple_seq_add_stmt (&seq, g);
652 /* At this point, after so much hard work, we have only constructed
653 the conditional jump,
655 if (__mf_elem->low > __mf_base
656 || (__mf_elem->high < __mf_limit))
658 The lowered GIMPLE statements representing this code are in the
659 sequence 'seq' built above.
661 We can insert this now in the current basic block, i.e. the one that
662 the statement we're instrumenting was originally in. */
663 gsi = gsi_last_bb (cond_bb);
664 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
666 /* Now build up the body of the cache-miss handling:
668 __mf_check();
669 refresh *_l vars.
671 This is the body of the conditional. */
673 seq = gimple_seq_alloc ();
674 /* u is a string, so it is already a gimple value. */
675 u = mf_file_function_line_tree (location);
676 /* NB: we pass the overall [base..limit] range to mf_check. */
677 v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
678 fold_build2_loc (location,
679 MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
680 build_int_cst (mf_uintptr_type, 1));
681 v = force_gimple_operand (v, &stmts, true, NULL_TREE);
682 gimple_seq_add_seq (&seq, stmts);
683 g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
684 gimple_seq_add_stmt (&seq, g);
686 if (! flag_mudflap_threads)
688 if (stmt_ends_bb_p (g))
690 gsi = gsi_start_bb (then_bb);
691 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
692 e = split_block (then_bb, g);
693 then_bb = e->dest;
694 seq = gimple_seq_alloc ();
697 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
698 gimple_seq_add_stmt (&seq, g);
700 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
701 gimple_seq_add_stmt (&seq, g);
704 /* Insert the check code in the THEN block. */
705 gsi = gsi_start_bb (then_bb);
706 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
708 *instr_gsi = gsi_start_bb (join_bb);
712 /* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
713 eligible for instrumentation. For the mudflap1 pass, this implies
714 that it should be registered with the libmudflap runtime. For the
715 mudflap2 pass this means instrumenting an indirection operation with
716 respect to the object. */
718 static int
719 mf_decl_eligible_p (tree decl)
721 return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
722 /* The decl must have its address taken. In the case of
723 arrays, this flag is also set if the indexes are not
724 compile-time known valid constants. */
725 /* XXX: not sufficient: return-by-value structs! */
726 && TREE_ADDRESSABLE (decl)
727 /* The type of the variable must be complete. */
728 && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
729 /* The decl hasn't been decomposed somehow. */
730 && !DECL_HAS_VALUE_EXPR_P (decl));
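/* Illustrative examples: a local "int n;" whose address is never taken
   is not eligible; "int buf[16];" becomes eligible once it is marked
   TREE_ADDRESSABLE (its address is taken, or it is indexed with a
   value that is not a known-valid constant); a declaration whose type
   is still incomplete is not eligible.  */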
734 static void
735 mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
736 location_t location, tree dirflag)
738 tree type, base, limit, addr, size, t;
740 /* Don't instrument read operations. */
741 if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
742 return;
744 /* Don't instrument marked nodes. */
745 if (mf_marked_p (*tp))
746 return;
748 t = *tp;
749 type = TREE_TYPE (t);
751 if (type == error_mark_node)
752 return;
754 size = TYPE_SIZE_UNIT (type);
756 switch (TREE_CODE (t))
758 case ARRAY_REF:
759 case COMPONENT_REF:
761 /* This is trickier than it may first appear. The reason is
762 that we are looking at expressions from the "inside out" at
763 this point. We may have a complex nested aggregate/array
764 expression (e.g. "a.b[i].c"), maybe with an indirection as
765 the leftmost operator ("p->a.b.d"), where instrumentation
766 is necessary. Or we may have an innocent "a.b.c"
767 expression that must not be instrumented. We need to
768 recurse all the way down the nesting structure to figure it
769 out: looking just at the outer node is not enough. */
770 tree var;
771 int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
772 /* If we have a bitfield component reference, we must note the
773 innermost addressable object in ELT, from which we will
774 construct the byte-addressable bounds of the bitfield. */
775 tree elt = NULL_TREE;
776 int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
777 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));
779 /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
780 containment hierarchy to find the outermost VAR_DECL. */
781 var = TREE_OPERAND (t, 0);
782 while (1)
784 if (bitfield_ref_p && elt == NULL_TREE
785 && (TREE_CODE (var) == ARRAY_REF
786 || TREE_CODE (var) == COMPONENT_REF))
787 elt = var;
789 if (TREE_CODE (var) == ARRAY_REF)
791 component_ref_only = 0;
792 var = TREE_OPERAND (var, 0);
794 else if (TREE_CODE (var) == COMPONENT_REF)
795 var = TREE_OPERAND (var, 0);
796 else if (INDIRECT_REF_P (var)
797 || TREE_CODE (var) == MEM_REF)
799 base = TREE_OPERAND (var, 0);
800 break;
802 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
804 var = TREE_OPERAND (var, 0);
805 if (CONSTANT_CLASS_P (var)
806 && TREE_CODE (var) != STRING_CST)
807 return;
809 else
811 gcc_assert (TREE_CODE (var) == VAR_DECL
812 || TREE_CODE (var) == PARM_DECL
813 || TREE_CODE (var) == RESULT_DECL
814 || TREE_CODE (var) == STRING_CST);
815 /* Don't instrument this access if the underlying
816 variable is not "eligible". This test matches
817 those arrays that have only known-valid indexes,
818 and thus are not labeled TREE_ADDRESSABLE. */
819 if (! mf_decl_eligible_p (var) || component_ref_only)
820 return;
821 else
823 base = build1 (ADDR_EXPR,
824 build_pointer_type (TREE_TYPE (var)), var);
825 break;
830 /* Handle the case of ordinary non-indirection structure
831 accesses. These have only nested COMPONENT_REF nodes (no
832 INDIRECT_REF), but pass through the above filter loop.
833 Note that it's possible for such a struct variable to match
834 the eligible_p test because someone else might take its
835 address sometime. */
837 /* We need special processing for bitfield components, because
838 their addresses cannot be taken. */
839 if (bitfield_ref_p)
841 tree field = TREE_OPERAND (t, 1);
843 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
844 size = DECL_SIZE_UNIT (field);
846 if (elt)
847 elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
848 elt);
849 addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
850 addr = fold_build_pointer_plus_loc (location,
851 addr, byte_position (field));
853 else
854 addr = build1 (ADDR_EXPR, build_pointer_type (type), t);
856 limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
857 fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
858 fold_convert (mf_uintptr_type, addr),
859 size),
860 integer_one_node);
862 break;
864 case INDIRECT_REF:
865 addr = TREE_OPERAND (t, 0);
866 base = addr;
867 limit = fold_build_pointer_plus_hwi_loc
868 (location, fold_build_pointer_plus_loc (location, base, size), -1);
869 break;
871 case MEM_REF:
872 addr = fold_build_pointer_plus_loc (location, TREE_OPERAND (t, 0),
873 TREE_OPERAND (t, 1));
874 base = addr;
875 limit = fold_build_pointer_plus_hwi_loc (location,
876 fold_build_pointer_plus_loc (location,
877 base, size), -1);
878 break;
880 case TARGET_MEM_REF:
881 addr = tree_mem_ref_addr (ptr_type_node, t);
882 base = addr;
883 limit = fold_build_pointer_plus_hwi_loc (location,
884 fold_build_pointer_plus_loc (location,
885 base, size), -1);
886 break;
888 case ARRAY_RANGE_REF:
889 warning (OPT_Wmudflap,
890 "mudflap checking not yet implemented for ARRAY_RANGE_REF");
891 return;
893 case BIT_FIELD_REF:
894 /* ??? merge with COMPONENT_REF code above? */
896 tree ofs, rem, bpu;
898 /* If we're not dereferencing something, then the access
899 must be ok. */
900 if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
901 return;
903 bpu = bitsize_int (BITS_PER_UNIT);
904 ofs = fold_convert (bitsizetype, TREE_OPERAND (t, 2));
905 rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
906 ofs = size_binop_loc (location, TRUNC_DIV_EXPR, ofs, bpu);
908 size = fold_convert (bitsizetype, TREE_OPERAND (t, 1));
909 size = size_binop_loc (location, PLUS_EXPR, size, rem);
910 size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
911 size = fold_convert (sizetype, size);
913 addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
914 addr = fold_convert (ptr_type_node, addr);
915 addr = fold_build_pointer_plus_loc (location, addr, ofs);
917 base = addr;
918 limit = fold_build_pointer_plus_hwi_loc (location,
919 fold_build_pointer_plus_loc (location,
920 base, size), -1);
922 break;
924 default:
925 return;
928 mf_build_check_statement_for (base, limit, iter, location, dirflag);
930 /* Transform
931 1) Memory references.
932 2) BUILTIN_ALLOCA calls. */
934 static void
935 mf_xform_statements (void)
937 basic_block bb, next;
938 gimple_stmt_iterator i;
939 int saved_last_basic_block = last_basic_block;
940 enum gimple_rhs_class grhs_class;
942 bb = ENTRY_BLOCK_PTR->next_bb;
943 do
945 next = bb->next_bb;
946 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
948 gimple s = gsi_stmt (i);
950 /* Only a few GIMPLE statements can reference memory. */
951 switch (gimple_code (s))
953 case GIMPLE_ASSIGN:
954 mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
955 gimple_location (s), integer_one_node);
956 mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
957 gimple_location (s), integer_zero_node);
958 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
959 if (grhs_class == GIMPLE_BINARY_RHS)
960 mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
961 gimple_location (s), integer_zero_node);
962 break;
964 case GIMPLE_RETURN:
965 if (gimple_return_retval (s) != NULL_TREE)
967 mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
968 gimple_location (s),
969 integer_zero_node);
971 break;
973 case GIMPLE_CALL:
975 tree fndecl = gimple_call_fndecl (s);
976 if (fndecl && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
977 || (DECL_FUNCTION_CODE (fndecl)
978 == BUILT_IN_ALLOCA_WITH_ALIGN)))
979 gimple_call_set_cannot_inline (s, true);
981 break;
983 default:
987 bb = next;
989 while (bb && bb->index <= saved_last_basic_block);
992 /* ------------------------------------------------------------------------ */
993 /* ADDR_EXPR transforms. Perform the declaration-related mudflap tree
994 transforms on the current function.
996 This is the first part of the mudflap instrumentation. It works on
997 high-level GIMPLE because after lowering, all variables are moved out
998 of their BIND_EXPR binding context, and we lose liveness information
999 for the declarations we wish to instrument. */
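/* As a rough illustration of the transform: for a bind block declaring
   an address-taken local

       {
         struct S s;
         ... body using s ...
       }

   the pass conceptually rewrites the body as

       try
         {
           __mf_register (&s, sizeof (s), 3, "foo.c:12 (fn) s");
           ... body using s ...
         }
       finally
         {
           __mf_unregister (&s, sizeof (s), 3);
         }

   where 3 is __MF_TYPE_STACK.  Sketch only; the real GIMPLE is built
   by mx_register_decls below.  */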
1001 static unsigned int
1002 execute_mudflap_function_decls (void)
1004 struct gimplify_ctx gctx;
1006 /* Don't instrument functions such as the synthetic constructor
1007 built during mudflap_finish_file. */
1008 if (mf_marked_p (current_function_decl) ||
1009 DECL_ARTIFICIAL (current_function_decl))
1010 return 0;
1012 push_gimplify_context (&gctx);
1014 mf_xform_decls (gimple_body (current_function_decl),
1015 DECL_ARGUMENTS (current_function_decl));
1017 pop_gimplify_context (NULL);
1018 return 0;
1021 /* This struct is passed between mf_xform_decls to store state needed
1022 during the traversal searching for objects that have their
1023 addresses taken. */
1024 struct mf_xform_decls_data
1026 tree param_decls;
1030 /* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
1031 _DECLs if appropriate. Arrange to call the __mf_register function
1032 now, and the __mf_unregister function later for each. Return the
1033 gimple sequence after synthesis. */
1034 gimple_seq
1035 mx_register_decls (tree decl, gimple_seq seq, location_t location)
1037 gimple_seq finally_stmts = NULL;
1038 gimple_stmt_iterator initially_stmts = gsi_start (seq);
1040 while (decl != NULL_TREE)
1042 if (mf_decl_eligible_p (decl)
1043 /* Not already processed. */
1044 && ! mf_marked_p (decl)
1045 /* Automatic variable. */
1046 && ! DECL_EXTERNAL (decl)
1047 && ! TREE_STATIC (decl))
1049 tree size = NULL_TREE, variable_name;
1050 gimple unregister_fncall, register_fncall;
1051 tree unregister_fncall_param, register_fncall_param;
1053 /* Variable-sized objects should already have had their sizes
1054 gimplified by the time we get here. */
1055 size = fold_convert (size_type_node,
1056 TYPE_SIZE_UNIT (TREE_TYPE (decl)));
1057 gcc_assert (is_gimple_val (size));
1060 unregister_fncall_param =
1061 mf_mark (build1 (ADDR_EXPR,
1062 build_pointer_type (TREE_TYPE (decl)),
1063 decl));
1064 /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
1065 unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
1066 unregister_fncall_param,
1067 size,
1068 integer_three_node);
1071 variable_name = mf_varname_tree (decl);
1072 register_fncall_param =
1073 mf_mark (build1 (ADDR_EXPR,
1074 build_pointer_type (TREE_TYPE (decl)),
1075 decl));
1076 /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
1077 "name") */
1078 register_fncall = gimple_build_call (mf_register_fndecl, 4,
1079 register_fncall_param,
1080 size,
1081 integer_three_node,
1082 variable_name);
1085 /* Accumulate the two calls. */
1086 gimple_set_location (register_fncall, location);
1087 gimple_set_location (unregister_fncall, location);
1089 /* Add the __mf_register call at the current appending point. */
1090 if (gsi_end_p (initially_stmts))
1092 if (!DECL_ARTIFICIAL (decl))
1093 warning (OPT_Wmudflap,
1094 "mudflap cannot track %qE in stub function",
1095 DECL_NAME (decl));
1097 else
1099 gsi_insert_before (&initially_stmts, register_fncall,
1100 GSI_SAME_STMT);
1102 /* Accumulate the FINALLY piece. */
1103 gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
1105 mf_mark (decl);
1108 decl = DECL_CHAIN (decl);
1111 /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
1112 if (finally_stmts != NULL)
1114 gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
1115 gimple_seq new_seq = gimple_seq_alloc ();
1117 gimple_seq_add_stmt (&new_seq, stmt);
1118 return new_seq;
1120 else
1121 return seq;
1125 /* Process every variable mentioned in BIND_EXPRs. */
1126 static tree
1127 mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
1128 bool *handled_operands_p ATTRIBUTE_UNUSED,
1129 struct walk_stmt_info *wi)
1131 struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
1132 gimple stmt = gsi_stmt (*gsi);
1134 switch (gimple_code (stmt))
1136 case GIMPLE_BIND:
1138 /* Process function parameters now (but only once). */
1139 if (d->param_decls)
1141 gimple_bind_set_body (stmt,
1142 mx_register_decls (d->param_decls,
1143 gimple_bind_body (stmt),
1144 gimple_location (stmt)));
1145 d->param_decls = NULL_TREE;
1148 gimple_bind_set_body (stmt,
1149 mx_register_decls (gimple_bind_vars (stmt),
1150 gimple_bind_body (stmt),
1151 gimple_location (stmt)));
1153 break;
1155 default:
1156 break;
1159 return NULL_TREE;
1162 /* Perform the object lifetime tracking mudflap transform on the given function
1163 tree. The tree is mutated in place, with possibly copied subtree nodes.
1165 For every auto variable declared, if its address is ever taken
1166 within the function, then supply its lifetime to the mudflap
1167 runtime with the __mf_register and __mf_unregister calls. */
1170 static void
1171 mf_xform_decls (gimple_seq fnbody, tree fnparams)
1173 struct mf_xform_decls_data d;
1174 struct walk_stmt_info wi;
1175 struct pointer_set_t *pset = pointer_set_create ();
1177 d.param_decls = fnparams;
1178 memset (&wi, 0, sizeof (wi));
1179 wi.info = (void*) &d;
1180 wi.pset = pset;
1181 walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
1182 pointer_set_destroy (pset);
1186 /* ------------------------------------------------------------------------ */
1187 /* Externally visible mudflap functions. */
1190 /* Mark and return the given tree node to prevent further mudflap
1191 transforms. */
1192 static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;
1194 tree
1195 mf_mark (tree t)
1197 void **slot;
1199 if (marked_trees == NULL)
1200 marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
1201 NULL);
1203 slot = htab_find_slot (marked_trees, t, INSERT);
1204 *slot = t;
1205 return t;
1208 int
1209 mf_marked_p (tree t)
1211 void *entry;
1213 if (marked_trees == NULL)
1214 return 0;
1216 entry = htab_find (marked_trees, t);
1217 return (entry != NULL);
1220 /* Remember given node as a static of some kind: global data,
1221 function-scope static, or an anonymous constant. Its assembler
1222 label is given. */
1224 /* A list of globals whose incomplete declarations we encountered.
1225 Instead of emitting the __mf_register call for them here, it's
1226 delayed until program finish time. If they're still incomplete by
1227 then, warnings are emitted. */
1229 static GTY (()) VEC(tree,gc) *deferred_static_decls;
1231 /* A list of statements for calling __mf_register() at startup time. */
1232 static GTY (()) tree enqueued_call_stmt_chain;
1234 static void
1235 mudflap_register_call (tree obj, tree object_size, tree varname)
1237 tree arg, call_stmt;
1239 arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
1240 arg = fold_convert (ptr_type_node, arg);
1242 call_stmt = build_call_expr (mf_register_fndecl, 4,
1243 arg,
1244 fold_convert (size_type_node, object_size),
1245 /* __MF_TYPE_STATIC */
1246 build_int_cst (integer_type_node, 4),
1247 varname);
1249 append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
1252 void
1253 mudflap_enqueue_decl (tree obj)
1255 if (mf_marked_p (obj))
1256 return;
1258 /* We don't need to process variable decls that are internally
1259 generated extern. If we did, we'd end up with warnings for them
1260 during mudflap_finish_file (). That would confuse the user,
1261 since the text would refer to variables that don't show up in the
1262 user's source code. */
1263 if (DECL_P (obj) && DECL_EXTERNAL (obj) && DECL_ARTIFICIAL (obj))
1264 return;
1266 VEC_safe_push (tree, gc, deferred_static_decls, obj);
1270 void
1271 mudflap_enqueue_constant (tree obj)
1273 tree object_size, varname;
1275 if (mf_marked_p (obj))
1276 return;
1278 if (TREE_CODE (obj) == STRING_CST)
1279 object_size = size_int (TREE_STRING_LENGTH (obj));
1280 else
1281 object_size = size_in_bytes (TREE_TYPE (obj));
1283 if (TREE_CODE (obj) == STRING_CST)
1284 varname = mf_build_string ("string literal");
1285 else
1286 varname = mf_build_string ("constant");
1288 mudflap_register_call (obj, object_size, varname);
1292 /* Emit any file-wide instrumentation. */
1293 void
1294 mudflap_finish_file (void)
1296 tree ctor_statements = NULL_TREE;
1298 /* No need to continue when there were errors. */
1299 if (seen_error ())
1300 return;
1302 /* Insert a call to __mf_init. */
1304 tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
1305 append_to_statement_list (call2_stmt, &ctor_statements);
1308 /* If appropriate, call __mf_set_options to pass along read-ignore mode. */
1309 if (flag_mudflap_ignore_reads)
1311 tree arg = mf_build_string ("-ignore-reads");
1312 tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
1313 append_to_statement_list (call_stmt, &ctor_statements);
1316 /* Process all enqueued object decls. */
1317 if (deferred_static_decls)
1319 size_t i;
1320 tree obj;
1321 FOR_EACH_VEC_ELT (tree, deferred_static_decls, i, obj)
1323 gcc_assert (DECL_P (obj));
1325 if (mf_marked_p (obj))
1326 continue;
1328 /* Omit registration for static unaddressed objects. NB:
1329 Perform registration for non-static objects regardless of
1330 TREE_USED or TREE_ADDRESSABLE, because they may be used
1331 from other compilation units. */
1332 if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
1333 continue;
1335 if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
1337 warning (OPT_Wmudflap,
1338 "mudflap cannot track unknown size extern %qE",
1339 DECL_NAME (obj));
1340 continue;
1343 mudflap_register_call (obj,
1344 size_in_bytes (TREE_TYPE (obj)),
1345 mf_varname_tree (obj));
1348 VEC_truncate (tree, deferred_static_decls, 0);
1351 /* Append all the enqueued registration calls. */
1352 if (enqueued_call_stmt_chain)
1354 append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
1355 enqueued_call_stmt_chain = NULL_TREE;
1358 cgraph_build_static_cdtor ('I', ctor_statements,
1359 MAX_RESERVED_INIT_PRIORITY-1);
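/* The net effect of the above is a synthesized static constructor, run
   before main, roughly equivalent to

       __mf_init ();
       __mf_set_options ("-ignore-reads");   (only in read-ignore mode)
       __mf_register (&global, sizeof (global), 4, "foo.c (decl) global");
       ...

   where 4 is __MF_TYPE_STATIC as noted in mudflap_register_call above.
   Sketch only; the constructor itself is emitted through
   cgraph_build_static_cdtor.  */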
1363 static bool
1364 gate_mudflap (void)
1366 return flag_mudflap != 0;
1369 struct gimple_opt_pass pass_mudflap_1 =
1372 GIMPLE_PASS,
1373 "mudflap1", /* name */
1374 gate_mudflap, /* gate */
1375 execute_mudflap_function_decls, /* execute */
1376 NULL, /* sub */
1377 NULL, /* next */
1378 0, /* static_pass_number */
1379 TV_NONE, /* tv_id */
1380 PROP_gimple_any, /* properties_required */
1381 0, /* properties_provided */
1382 0, /* properties_destroyed */
1383 0, /* todo_flags_start */
1384 0 /* todo_flags_finish */
1388 struct gimple_opt_pass pass_mudflap_2 =
1391 GIMPLE_PASS,
1392 "mudflap2", /* name */
1393 gate_mudflap, /* gate */
1394 execute_mudflap_function_ops, /* execute */
1395 NULL, /* sub */
1396 NULL, /* next */
1397 0, /* static_pass_number */
1398 TV_NONE, /* tv_id */
1399 PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
1400 0, /* properties_provided */
1401 0, /* properties_destroyed */
1402 0, /* todo_flags_start */
1403 TODO_verify_flow | TODO_verify_stmts
1404 | TODO_update_ssa /* todo_flags_finish */
1408 #include "gt-tree-mudflap.h"