/* Mudflap: narrow-pointer bounds-checking by tree rewriting.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Frank Ch. Eigler <fche@redhat.com>
   and Graydon Hoare <graydon@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "tree-inline.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "tree-mudflap.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "hashtab.h"
#include "diagnostic.h"
#include <demangle.h>
#include "langhooks.h"
#include "ggc.h"
#include "cgraph.h"
#include "toplev.h"
#include "gimple.h"
/* Internal function decls */


/* Options.  */
#define flag_mudflap_threads (flag_mudflap == 2)
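/* Editor's note (assumption, not part of the original source): -fmudflap is
   believed to set flag_mudflap to 1 and -fmudflapth (the thread-aware
   variant) to 2, so this macro distinguishes the threaded mode, in which the
   lookup-cache globals below must not be shadowed in function-local
   temporaries.  */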
/* Helpers.  */
static tree mf_build_string (const char *string);
static tree mf_varname_tree (tree);
static tree mf_file_function_line_tree (location_t);

/* Indirection-related instrumentation.  */
static void mf_decl_cache_locals (void);
static void mf_decl_clear_locals (void);
static void mf_xform_statements (void);
static unsigned int execute_mudflap_function_ops (void);

/* Addressable variables instrumentation.  */
static void mf_xform_decls (gimple_seq, tree);
static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
                                struct walk_stmt_info *);
static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
static unsigned int execute_mudflap_function_decls (void);
/* ------------------------------------------------------------------------ */
/* Some generally helpful functions for mudflap instrumentation.  */

/* Build a reference to a literal string.  */
static tree
mf_build_string (const char *string)
{
  size_t len = strlen (string);
  tree result = mf_mark (build_string (len + 1, string));

  TREE_TYPE (result) = build_array_type
    (char_type_node, build_index_type (build_int_cst (NULL_TREE, len)));
  TREE_CONSTANT (result) = 1;
  TREE_READONLY (result) = 1;
  TREE_STATIC (result) = 1;

  result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);

  return mf_mark (result);
}
/* Create a properly typed STRING_CST node that describes the given
   declaration.  It will be used as an argument for __mf_register().
   Try to construct a helpful string, including file/function/variable
   name.  */
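/* Editor's note: an illustrative, hypothetical example (not in the original
   source).  For a variable "buf" declared at line 12, column 7 of foo.c
   inside main(), the string built below would look roughly like

       "foo.c:12:7 (main) buf"

   i.e. FILENAME[:LINE[:COLUMN]], the enclosing function in parentheses, and
   then the (possibly demangled) declaration name.  */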
static tree
mf_varname_tree (tree decl)
{
  static pretty_printer buf_rec;
  static int initialized = 0;
  pretty_printer *buf = & buf_rec;
  const char *buf_contents;
  tree result;

  gcc_assert (decl);

  if (!initialized)
    {
      pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
      initialized = 1;
    }
  pp_clear_output_area (buf);

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  {
    expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
    const char *sourcefile;
    unsigned sourceline = xloc.line;
    unsigned sourcecolumn = 0;
    sourcecolumn = xloc.column;
    sourcefile = xloc.file;
    if (sourcefile == NULL && current_function_decl != NULL_TREE)
      sourcefile = DECL_SOURCE_FILE (current_function_decl);
    if (sourcefile == NULL)
      sourcefile = "<unknown file>";

    pp_string (buf, sourcefile);

    if (sourceline != 0)
      {
        pp_string (buf, ":");
        pp_decimal_int (buf, sourceline);

        if (sourcecolumn != 0)
          {
            pp_string (buf, ":");
            pp_decimal_int (buf, sourcecolumn);
          }
      }
  }

  if (current_function_decl != NULL_TREE)
    {
      /* Add (FUNCTION) */
      pp_string (buf, " (");
      {
        const char *funcname = NULL;
        if (DECL_NAME (current_function_decl))
          funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
        if (funcname == NULL)
          funcname = "anonymous fn";

        pp_string (buf, funcname);
      }
      pp_string (buf, ") ");
    }
  else
    pp_string (buf, " ");

  /* Add <variable-declaration>, possibly demangled.  */
  {
    const char *declname = NULL;

    if (DECL_NAME (decl) != NULL)
      {
        if (strcmp ("GNU C++", lang_hooks.name) == 0)
          {
            /* The gcc/cp decl_printable_name hook doesn't do as good a job as
               the libiberty demangler.  */
            declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
                                       DMGL_AUTO | DMGL_VERBOSE);
          }
        if (declname == NULL)
          declname = lang_hooks.decl_printable_name (decl, 3);
      }
    if (declname == NULL)
      declname = "<unnamed variable>";

    pp_string (buf, declname);
  }

  /* Return the lot as a new STRING_CST.  */
  buf_contents = pp_base_formatted_text (buf);
  result = mf_build_string (buf_contents);
  pp_clear_output_area (buf);

  return result;
}
/* And another friend, for producing a simpler message.  */

static tree
mf_file_function_line_tree (location_t location)
{
  expanded_location xloc = expand_location (location);
  const char *file = NULL, *colon, *line, *op, *name, *cp;
  char linecolbuf[30]; /* Enough for two decimal numbers plus a colon.  */
  char *string;
  tree result;

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  file = xloc.file;
  if (file == NULL && current_function_decl != NULL_TREE)
    file = DECL_SOURCE_FILE (current_function_decl);
  if (file == NULL)
    file = "<unknown file>";

  if (xloc.line > 0)
    {
      if (xloc.column > 0)
        sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
      else
        sprintf (linecolbuf, "%d", xloc.line);
      colon = ":";
      line = linecolbuf;
    }
  else
    colon = line = "";

  /* Add (FUNCTION).  */
  name = lang_hooks.decl_printable_name (current_function_decl, 1);
  if (name)
    {
      op = " (";
      cp = ")";
    }
  else
    op = name = cp = "";

  string = concat (file, colon, line, op, name, cp, NULL);
  result = mf_build_string (string);
  free (string);

  return result;
}
/* global tree nodes */

/* Global tree objects for global variables and functions exported by
   the mudflap runtime library.  mudflap_init must be called before
   using these.  */
/* uintptr_t (usually "unsigned long") */
static GTY (()) tree mf_uintptr_type;

/* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
static GTY (()) tree mf_cache_struct_type;

/* struct __mf_cache * const */
static GTY (()) tree mf_cache_structptr_type;

/* extern struct __mf_cache __mf_lookup_cache []; */
static GTY (()) tree mf_cache_array_decl;

/* extern unsigned char __mf_lc_shift; */
static GTY (()) tree mf_cache_shift_decl;

/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;

/* Their function-scope local shadows, used in single-threaded mode only.  */

/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;

/* auto const uintptr_t __mf_lc_mask_l; */
static GTY (()) tree mf_cache_mask_decl_l;

/* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_check_fndecl;

/* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_register_fndecl;

/* extern void __mf_unregister (void *ptr, size_t sz, int type); */
static GTY (()) tree mf_unregister_fndecl;

/* extern void __mf_init (); */
static GTY (()) tree mf_init_fndecl;

/* extern int __mf_set_options (const char*); */
static GTY (()) tree mf_set_options_fndecl;
/* Helper for mudflap_init: construct a decl with the given category,
   name, and type, mark it an external reference, and pushdecl it.  */
static inline tree
mf_make_builtin (enum tree_code category, const char *name, tree type)
{
  tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
                                   category, get_identifier (name), type));
  TREE_PUBLIC (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  lang_hooks.decls.pushdecl (decl);
  /* The decl was declared by the compiler.  */
  DECL_ARTIFICIAL (decl) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (decl) = 1;
  return decl;
}
/* Helper for mudflap_init: construct a tree corresponding to the type
     struct __mf_cache { uintptr_t low; uintptr_t high; };
   where uintptr_t is the FIELD_TYPE argument.  */
static inline tree
mf_make_mf_cache_struct_type (tree field_type)
{
  /* There is, abominably, no language-independent way to construct a
     RECORD_TYPE.  So we have to call the basic type construction
     primitives by hand.  */
  tree fieldlo = build_decl (UNKNOWN_LOCATION,
                             FIELD_DECL, get_identifier ("low"), field_type);
  tree fieldhi = build_decl (UNKNOWN_LOCATION,
                             FIELD_DECL, get_identifier ("high"), field_type);

  tree struct_type = make_node (RECORD_TYPE);
  DECL_CONTEXT (fieldlo) = struct_type;
  DECL_CONTEXT (fieldhi) = struct_type;
  TREE_CHAIN (fieldlo) = fieldhi;
  TYPE_FIELDS (struct_type) = fieldlo;
  TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
  layout_type (struct_type);

  return struct_type;
}
#define build_function_type_0(rtype)                                    \
  build_function_type (rtype, void_list_node)
#define build_function_type_1(rtype, arg1)                              \
  build_function_type (rtype, tree_cons (0, arg1, void_list_node))
#define build_function_type_3(rtype, arg1, arg2, arg3)                  \
  build_function_type (rtype,                                           \
                       tree_cons (0, arg1,                              \
                                  tree_cons (0, arg2,                   \
                                             tree_cons (0, arg3,        \
                                                        void_list_node))))
#define build_function_type_4(rtype, arg1, arg2, arg3, arg4)            \
  build_function_type (rtype,                                           \
                       tree_cons (0, arg1,                              \
                                  tree_cons (0, arg2,                   \
                                             tree_cons (0, arg3,        \
                                                        tree_cons (0, arg4, \
                                                                   void_list_node)))))
/* Initialize the global tree nodes that correspond to mf-runtime.h
   declarations.  */
void
mudflap_init (void)
{
  static bool done = false;
  tree mf_const_string_type;
  tree mf_cache_array_type;
  tree mf_check_register_fntype;
  tree mf_unregister_fntype;
  tree mf_init_fntype;
  tree mf_set_options_fntype;

  if (done)
    return;
  done = true;

  mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
                                                    /*unsignedp=*/true);
  mf_const_string_type
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));

  mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
  mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
  mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
  mf_check_register_fntype =
    build_function_type_4 (void_type_node, ptr_type_node, size_type_node,
                           integer_type_node, mf_const_string_type);
  mf_unregister_fntype =
    build_function_type_3 (void_type_node, ptr_type_node, size_type_node,
                           integer_type_node);
  mf_init_fntype =
    build_function_type_0 (void_type_node);
  mf_set_options_fntype =
    build_function_type_1 (integer_type_node, mf_const_string_type);

  mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
                                         mf_cache_array_type);
  mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
                                         unsigned_char_type_node);
  mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
                                        mf_uintptr_type);
  /* Don't process these in mudflap_enqueue_decl, should they come by
     there for some reason.  */
  mf_mark (mf_cache_array_decl);
  mf_mark (mf_cache_shift_decl);
  mf_mark (mf_cache_mask_decl);
  mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
                                     mf_check_register_fntype);
  mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
                                        mf_check_register_fntype);
  mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
                                          mf_unregister_fntype);
  mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
                                    mf_init_fntype);
  mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
                                           mf_set_options_fntype);
}
#undef build_function_type_4
#undef build_function_type_3
#undef build_function_type_1
#undef build_function_type_0
/* ------------------------------------------------------------------------ */
/* This is the second part of the mudflap instrumentation.  It works on
   low-level GIMPLE using the CFG, because we want to run this pass after
   tree optimizations have been performed, but we have to preserve the CFG
   for expansion from trees to RTL.
   Below is the list of transformations performed on statements in the
   current function.

   1) Memory reference transforms: Perform the mudflap indirection-related
      tree transforms on memory references.

   2) Mark BUILTIN_ALLOCA calls not inlineable.  */
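/* Editor's note: an illustrative sketch (not part of the original source) of
   what transform 1) conceptually produces.  At the C level, a write such as
   "*p = v;" is guarded roughly as follows, with the unlikely branch calling
   into the libmudflap runtime:

       uintptr_t base = (uintptr_t) p;
       uintptr_t limit = base + sizeof (*p) - 1;
       struct __mf_cache *elem =
         &__mf_lookup_cache[(base >> __mf_lc_shift) & __mf_lc_mask];
       if (elem->low > base || elem->high < limit)
         __mf_check ((void *) base, sizeof (*p), 1, "file.c:LINE (fn)");
       *p = v;

   The actual GIMPLE for this fast path is assembled in
   mf_build_check_statement_for below.  */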
static unsigned int
execute_mudflap_function_ops (void)
{
  struct gimplify_ctx gctx;

  /* Don't instrument functions such as the synthetic constructor
     built during mudflap_finish_file.  */
  if (mf_marked_p (current_function_decl) ||
      DECL_ARTIFICIAL (current_function_decl))
    return 0;

  push_gimplify_context (&gctx);

  /* In multithreaded mode, don't cache the lookup cache parameters.  */
  if (! flag_mudflap_threads)
    mf_decl_cache_locals ();

  mf_xform_statements ();

  if (! flag_mudflap_threads)
    mf_decl_clear_locals ();

  pop_gimplify_context (NULL);
  return 0;
}
/* Insert a gimple_seq SEQ on all the outgoing edges out of BB.  Note that
   if BB has more than one outgoing edge, SEQ will be replicated for each
   edge.  Also, abnormal edges will be ignored.  */

static void
insert_edge_copies_seq (gimple_seq seq, basic_block bb)
{
  edge e;
  edge_iterator ei;
  unsigned n_copies = -1;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      n_copies++;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
}
/* Create and initialize local shadow variables for the lookup cache
   globals.  Put their decls in the *_l globals for use by
   mf_build_check_statement_for.  */

static void
mf_decl_cache_locals (void)
{
  gimple g;
  gimple_seq seq = gimple_seq_alloc ();

  /* Build the cache vars.  */
  mf_cache_shift_decl_l
    = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl),
                                 "__mf_lookup_shift_l"));

  mf_cache_mask_decl_l
    = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl),
                                 "__mf_lookup_mask_l"));

  /* Build initialization nodes for the cache vars.  We just load the
     globals into the cache variables.  */
  g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);

  gsi_commit_edge_inserts ();
}


static void
mf_decl_clear_locals (void)
{
  /* Unset local shadows.  */
  mf_cache_shift_decl_l = NULL_TREE;
  mf_cache_mask_decl_l = NULL_TREE;
}
static void
mf_build_check_statement_for (tree base, tree limit,
                              gimple_stmt_iterator *instr_gsi,
                              location_t location, tree dirflag)
{
  gimple_stmt_iterator gsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;
  gimple g;
  gimple_seq seq, stmts;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
  gsi = *instr_gsi;
  gsi_prev (&gsi);
  if (! gsi_end_p (gsi))
    e = split_block (cond_bb, gsi_stmt (gsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */
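  /* Editor's note: an illustrative sketch (not in the original source) of the
     control flow assembled below:

            cond_bb   (cache lookup; ends with the miss test)
             |     \
     (false, |      \ (true, unlikely)
      likely)|       v
             |     then_bb   (__mf_check call; refresh *_l shadows)
             |     /
             v    v
            join_bb   (the statement being instrumented)
  */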
  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Build our local variables.  */
  mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem");
  mf_base = make_rename_temp (mf_uintptr_type, "__mf_base");
  mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  seq = gimple_seq_alloc ();
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (base));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_base, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (limit));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_limit, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              flag_mudflap_threads ? mf_cache_shift_decl
               : mf_cache_shift_decl_l);
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              flag_mudflap_threads ? mf_cache_mask_decl
               : mf_cache_mask_decl_l);
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_elem, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))
        {
          __mf_check ();
          ... and only if single-threaded:
          __mf_lookup_shift_l = ...;
          __mf_lookup_mask_l = ...;
        }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low > __mf_base'.  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
        1) u <-- '__mf_elem->high'
        2) v <-- '__mf_limit'.

     Then build 'u <-- (u < v)'.  */

  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TREE_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond");
  g = gimple_build_assign (cond, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
                         NULL_TREE);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))

     The lowered GIMPLE statements representing this code are in the
     sequence SEQ.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /* Now build up the body of the cache-miss handling:

     __mf_check();
     refresh *_l vars.

     This is the body of the conditional.  */

  seq = gimple_seq_alloc ();
  /* u is a string, so it is already a gimple value.  */
  u = mf_file_function_line_tree (location);
  /* NB: we pass the overall [base..limit] range to mf_check.  */
  v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
                       fold_build2_loc (location,
                                        MINUS_EXPR, mf_uintptr_type,
                                        mf_limit, mf_base),
                       build_int_cst (mf_uintptr_type, 1));
  v = force_gimple_operand (v, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
  gimple_seq_add_stmt (&seq, g);

  if (! flag_mudflap_threads)
    {
      if (stmt_ends_bb_p (g))
        {
          gsi = gsi_start_bb (then_bb);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
          e = split_block (then_bb, g);
          then_bb = e->dest;
          seq = gimple_seq_alloc ();
        }

      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
      gimple_seq_add_stmt (&seq, g);

      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
      gimple_seq_add_stmt (&seq, g);
    }

  /* Insert the check code in the THEN block.  */
  gsi = gsi_start_bb (then_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  *instr_gsi = gsi_start_bb (join_bb);
}
/* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
   eligible for instrumentation.  For the mudflap1 pass, this implies
   that it should be registered with the libmudflap runtime.  For the
   mudflap2 pass this means instrumenting an indirection operation with
   respect to the object.  */
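/* Editor's note: a hypothetical illustration, not in the original source.
   Given

       int x;  int *p = &x;   // x is TREE_ADDRESSABLE -> eligible
       int a[10]; a[i] = 0;   // non-constant index -> addressable -> eligible
       int y; y = 1;          // address never taken -> not eligible

   an eligible decl is registered/unregistered with libmudflap by the
   mudflap1 pass, and accesses through it are bounds-checked by mudflap2.  */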
static int
mf_decl_eligible_p (tree decl)
{
  return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
          /* The decl must have its address taken.  In the case of
             arrays, this flag is also set if the indexes are not
             compile-time known valid constants.  */
          /* XXX: not sufficient: return-by-value structs! */
          && TREE_ADDRESSABLE (decl)
          /* The type of the variable must be complete.  */
          && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
          /* The decl hasn't been decomposed somehow.  */
          && !DECL_HAS_VALUE_EXPR_P (decl));
}
static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
        /* If we have a bitfield component reference, we must note the
           innermost addressable object in ELT, from which we will
           construct the byte-addressable bounds of the bitfield.  */
        tree elt = NULL_TREE;
        int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
            if (bitfield_ref_p && elt == NULL_TREE
                && (TREE_CODE (var) == ARRAY_REF
                    || TREE_CODE (var) == COMPONENT_REF))
              elt = var;

            if (TREE_CODE (var) == ARRAY_REF)
              {
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var))
              {
                base = TREE_OPERAND (var, 0);
                break;
              }
            else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
              {
                var = TREE_OPERAND (var, 0);
                if (CONSTANT_CLASS_P (var)
                    && TREE_CODE (var) != STRING_CST)
                  return;
              }
            else
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
                  {
                    base = build1 (ADDR_EXPR,
                                   build_pointer_type (TREE_TYPE (var)), var);
                    break;
                  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

            if (elt)
              elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
                            elt);
            addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
            addr = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                                    addr, fold_convert_loc (location, sizetype,
                                                            byte_position (field)));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                                 fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
                                                  convert (mf_uintptr_type, addr),
                                                  size),
                                 integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                               fold_build2_loc (location,
                                                POINTER_PLUS_EXPR, ptr_type_node, base,
                                                size),
                               size_int (-1));
      break;

    case TARGET_MEM_REF:
      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                               fold_build2_loc (location,
                                                POINTER_PLUS_EXPR, ptr_type_node, base,
                                                size),
                               size_int (-1));
      break;

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
               "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above? */
      {
        tree ofs, rem, bpu;

        /* If we're not dereferencing something, then the access
           must be ok.  */
        if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
          return;

        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
        rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
        ofs = fold_convert_loc (location,
                                sizetype,
                                size_binop_loc (location,
                                                TRUNC_DIV_EXPR, ofs, bpu));

        size = convert (bitsizetype, TREE_OPERAND (t, 1));
        size = size_binop_loc (location, PLUS_EXPR, size, rem);
        size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
        size = convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = convert (ptr_type_node, addr);
        addr = fold_build2_loc (location, POINTER_PLUS_EXPR,
                                ptr_type_node, addr, ofs);

        base = addr;
        limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                                 fold_build2_loc (location,
                                                  POINTER_PLUS_EXPR, ptr_type_node,
                                                  base, size),
                                 size_int (-1));
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, location, dirflag);
}
/* Transform
   1) Memory references.
   2) BUILTIN_ALLOCA calls.  */
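/* Editor's note: an illustrative summary (not in the original source) of the
   dirflag convention used below.  For a GIMPLE assignment such as

       *p = *q;

   the left-hand side is checked with dirflag integer_one_node (a write) and
   the right-hand side operands and any returned value with integer_zero_node
   (a read); reads are skipped entirely under -fmudflapir
   (flag_mudflap_ignore_reads).  */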
static void
mf_xform_statements (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block;
  enum gimple_rhs_class grhs_class;

  bb = ENTRY_BLOCK_PTR ->next_bb;
  do
    {
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple s = gsi_stmt (i);

          /* Only a few GIMPLE statements can reference memory.  */
          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
              mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
                                 gimple_location (s), integer_one_node);
              mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
                                 gimple_location (s), integer_zero_node);
              grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
              if (grhs_class == GIMPLE_BINARY_RHS)
                mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
                                   gimple_location (s), integer_zero_node);
              break;

            case GIMPLE_RETURN:
              if (gimple_return_retval (s) != NULL_TREE)
                {
                  mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
                                     gimple_location (s),
                                     integer_zero_node);
                }
              break;

            case GIMPLE_CALL:
              {
                tree fndecl = gimple_call_fndecl (s);
                if (fndecl && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA))
                  gimple_call_set_cannot_inline (s, true);
              }
              break;

            default:
              ;
            }
        }
      bb = next;
    }
  while (bb && bb->index <= saved_last_basic_block);
}
/* ------------------------------------------------------------------------ */
/* ADDR_EXPR transforms.  Perform the declaration-related mudflap tree
   transforms on the current function.

   This is the first part of the mudflap instrumentation.  It works on
   high-level GIMPLE because after lowering, all variables are moved out
   of their BIND_EXPR binding context, and we lose liveness information
   for the declarations we wish to instrument.  */
static unsigned int
execute_mudflap_function_decls (void)
{
  struct gimplify_ctx gctx;

  /* Don't instrument functions such as the synthetic constructor
     built during mudflap_finish_file.  */
  if (mf_marked_p (current_function_decl) ||
      DECL_ARTIFICIAL (current_function_decl))
    return 0;

  push_gimplify_context (&gctx);

  mf_xform_decls (gimple_body (current_function_decl),
                  DECL_ARGUMENTS (current_function_decl));

  pop_gimplify_context (NULL);
  return 0;
}
/* This struct is passed through the mf_xform_decls statement walk to
   carry state needed during the traversal searching for objects that
   have their addresses taken.  */
struct mf_xform_decls_data
{
  tree param_decls;
};
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.  */
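/* Editor's note: an illustrative sketch (not in the original source) of the
   shape of the code produced here for one eligible local, say "int buf[10]":

       __mf_register (&buf, sizeof (buf), 3, "...buf...");   (3 == __MF_TYPE_STACK)
       try
         {
           ... original bind body ...
         }
       finally
         {
           __mf_unregister (&buf, sizeof (buf), 3);
         }
*/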
gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
          tree unregister_fncall_param, register_fncall_param;

          /* Variable-sized objects should have their sizes already
             gimplified by the time we get here.  */
          size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl)));
          gcc_assert (is_gimple_val (size));


          unregister_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
                                                 unregister_fncall_param,
                                                 size,
                                                 build_int_cst (NULL_TREE, 3));


          variable_name = mf_varname_tree (decl);
          register_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
                            "name") */
          register_fncall = gimple_build_call (mf_register_fndecl, 4,
                                               register_fncall_param,
                                               size,
                                               build_int_cst (NULL_TREE, 3),
                                               variable_name);


          /* Accumulate the two calls.  */
          gimple_set_location (register_fncall, location);
          gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.  */
          if (gsi_end_p (initially_stmts))
            {
              if (!DECL_ARTIFICIAL (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
                         DECL_NAME (decl));
            }
          else
            {
              gsi_insert_before (&initially_stmts, register_fncall,
                                 GSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
            }
          mf_mark (decl);
        }

      decl = TREE_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
    {
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = gimple_seq_alloc ();

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}


/* Process every variable mentioned in BIND_EXPRs.  */
static tree
mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
                    bool *handled_operands_p ATTRIBUTE_UNUSED,
                    struct walk_stmt_info *wi)
{
  struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
        /* Process function parameters now (but only once).  */
        if (d->param_decls)
          {
            gimple_bind_set_body (stmt,
                                  mx_register_decls (d->param_decls,
                                                     gimple_bind_body (stmt),
                                                     gimple_location (stmt)));
            d->param_decls = NULL_TREE;
          }

        gimple_bind_set_body (stmt,
                              mx_register_decls (gimple_bind_vars (stmt),
                                                 gimple_bind_body (stmt),
                                                 gimple_location (stmt)));
      }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Perform the object lifetime tracking mudflap transform on the given function
   tree.  The tree is mutated in place, with possibly copied subtree nodes.

   For every auto variable declared, if its address is ever taken
   within the function, then supply its lifetime to the mudflap
   runtime with the __mf_register and __mf_unregister calls.  */

static void
mf_xform_decls (gimple_seq fnbody, tree fnparams)
{
  struct mf_xform_decls_data d;
  struct walk_stmt_info wi;
  struct pointer_set_t *pset = pointer_set_create ();

  d.param_decls = fnparams;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void*) &d;
  wi.pset = pset;
  walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
  pointer_set_destroy (pset);
}
/* ------------------------------------------------------------------------ */
/* Externally visible mudflap functions.  */


/* Mark and return the given tree node to prevent further mudflap
   transforms.  */
static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;

tree
mf_mark (tree t)
{
  void **slot;

  if (marked_trees == NULL)
    marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
                                    NULL);

  slot = htab_find_slot (marked_trees, t, INSERT);
  *slot = t;
  return t;
}

int
mf_marked_p (tree t)
{
  void *entry;

  if (marked_trees == NULL)
    return 0;

  entry = htab_find (marked_trees, t);
  return (entry != NULL);
}
/* Remember the given node as a static of some kind: global data,
   function-scope static, or an anonymous constant.  Its assembler
   label is given.  */

/* A list of globals whose incomplete declarations we encountered.
   Instead of emitting the __mf_register call for them here, it's
   delayed until program finish time.  If they're still incomplete by
   then, warnings are emitted.  */

static GTY (()) VEC(tree,gc) *deferred_static_decls;

/* A list of statements for calling __mf_register() at startup time.  */
static GTY (()) tree enqueued_call_stmt_chain;

static void
mudflap_register_call (tree obj, tree object_size, tree varname)
{
  tree arg, call_stmt;

  arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
  arg = convert (ptr_type_node, arg);

  call_stmt = build_call_expr (mf_register_fndecl, 4,
                               arg,
                               convert (size_type_node, object_size),
                               /* __MF_TYPE_STATIC */
                               build_int_cst (NULL_TREE, 4),
                               varname);

  append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
}
void
mudflap_enqueue_decl (tree obj)
{
  if (mf_marked_p (obj))
    return;

  /* We don't need to process variable decls that are internally
     generated extern.  If we did, we'd end up with warnings for them
     during mudflap_finish_file ().  That would confuse the user,
     since the text would refer to variables that don't show up in the
     user's source code.  */
  if (DECL_P (obj) && DECL_EXTERNAL (obj) && DECL_ARTIFICIAL (obj))
    return;

  VEC_safe_push (tree, gc, deferred_static_decls, obj);
}


void
mudflap_enqueue_constant (tree obj)
{
  tree object_size, varname;

  if (mf_marked_p (obj))
    return;

  if (TREE_CODE (obj) == STRING_CST)
    object_size = build_int_cst (NULL_TREE, TREE_STRING_LENGTH (obj));
  else
    object_size = size_in_bytes (TREE_TYPE (obj));

  if (TREE_CODE (obj) == STRING_CST)
    varname = mf_build_string ("string literal");
  else
    varname = mf_build_string ("constant");

  mudflap_register_call (obj, object_size, varname);
}
/* Emit any file-wide instrumentation.  */
void
mudflap_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  /* No need to continue when there were errors.  */
  if (errorcount != 0 || sorrycount != 0)
    return;

  /* Insert a call to __mf_init.  */
  {
    tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
    append_to_statement_list (call2_stmt, &ctor_statements);
  }

  /* If appropriate, call __mf_set_options to pass along read-ignore mode.  */
  if (flag_mudflap_ignore_reads)
    {
      tree arg = mf_build_string ("-ignore-reads");
      tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
      append_to_statement_list (call_stmt, &ctor_statements);
    }

  /* Process all enqueued object decls.  */
  if (deferred_static_decls)
    {
      size_t i;
      tree obj;
      for (i = 0; VEC_iterate (tree, deferred_static_decls, i, obj); i++)
        {
          gcc_assert (DECL_P (obj));

          if (mf_marked_p (obj))
            continue;

          /* Omit registration for static unaddressed objects.  NB:
             Perform registration for non-static objects regardless of
             TREE_USED or TREE_ADDRESSABLE, because they may be used
             from other compilation units.  */
          if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
            continue;

          if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
            {
              warning (OPT_Wmudflap,
                       "mudflap cannot track unknown size extern %qE",
                       DECL_NAME (obj));
              continue;
            }

          mudflap_register_call (obj,
                                 size_in_bytes (TREE_TYPE (obj)),
                                 mf_varname_tree (obj));
        }

      VEC_truncate (tree, deferred_static_decls, 0);
    }

  /* Append all the enqueued registration calls.  */
  if (enqueued_call_stmt_chain)
    {
      append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
      enqueued_call_stmt_chain = NULL_TREE;
    }

  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY-1);
}
static bool
gate_mudflap (void)
{
  return flag_mudflap != 0;
}

struct gimple_opt_pass pass_mudflap_1 =
{
 {
  GIMPLE_PASS,
  "mudflap1",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_decls,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_mudflap_2 =
{
 {
  GIMPLE_PASS,
  "mudflap2",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_ops,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_dump_func | TODO_update_ssa    /* todo_flags_finish */
 }
};

#include "gt-tree-mudflap.h"