gcc/tree-mudflap.c
1 /* Mudflap: narrow-pointer bounds-checking by tree rewriting.
2 Copyright (C) 2002-2013 Free Software Foundation, Inc.
3 Contributed by Frank Ch. Eigler <fche@redhat.com>
4 and Graydon Hoare <graydon@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "tm_p.h"
29 #include "basic-block.h"
30 #include "flags.h"
31 #include "function.h"
32 #include "tree-inline.h"
33 #include "gimple.h"
34 #include "tree-iterator.h"
35 #include "tree-ssa.h"
36 #include "tree-mudflap.h"
37 #include "tree-pass.h"
38 #include "hashtab.h"
39 #include "diagnostic.h"
40 #include "demangle.h"
41 #include "langhooks.h"
42 #include "ggc.h"
43 #include "cgraph.h"
44 #include "gimple.h"
46 extern void add_bb_to_loop (basic_block, struct loop *);
48 /* Internal function decls */
51 /* Options. */
52 #define flag_mudflap_threads (flag_mudflap == 2)
54 /* Helpers. */
55 static tree mf_build_string (const char *string);
56 static tree mf_varname_tree (tree);
57 static tree mf_file_function_line_tree (location_t);
59 /* Indirection-related instrumentation. */
60 static void mf_decl_cache_locals (void);
61 static void mf_decl_clear_locals (void);
62 static void mf_xform_statements (void);
63 static unsigned int execute_mudflap_function_ops (void);
65 /* Addressable variables instrumentation. */
66 static void mf_xform_decls (gimple_seq, tree);
67 static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
68 struct walk_stmt_info *);
69 static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
70 static unsigned int execute_mudflap_function_decls (void);
 72 /* Return true if DECL is an artificial stub that shouldn't be instrumented by
73 mf. We should instrument clones of non-artificial functions. */
74 static inline bool
75 mf_artificial (const_tree decl)
77 return DECL_ARTIFICIAL (DECL_ORIGIN (decl));
80 /* ------------------------------------------------------------------------ */
81 /* Some generally helpful functions for mudflap instrumentation. */
83 /* Build a reference to a literal string. */
84 static tree
85 mf_build_string (const char *string)
87 size_t len = strlen (string);
88 tree result = mf_mark (build_string (len + 1, string));
90 TREE_TYPE (result) = build_array_type
91 (char_type_node, build_index_type (size_int (len)));
92 TREE_CONSTANT (result) = 1;
93 TREE_READONLY (result) = 1;
94 TREE_STATIC (result) = 1;
96 result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);
98 return mf_mark (result);
101 /* Create a properly typed STRING_CST node that describes the given
102 declaration. It will be used as an argument for __mf_register().
103 Try to construct a helpful string, including file/function/variable
104 name. */
106 static tree
107 mf_varname_tree (tree decl)
109 const char *buf_contents;
110 tree result;
112 gcc_assert (decl);
114 pretty_printer buf;
116 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
118 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
119 const char *sourcefile;
120 unsigned sourceline = xloc.line;
121 unsigned sourcecolumn = 0;
122 sourcecolumn = xloc.column;
123 sourcefile = xloc.file;
124 if (sourcefile == NULL && current_function_decl != NULL_TREE)
125 sourcefile = DECL_SOURCE_FILE (current_function_decl);
126 if (sourcefile == NULL)
127 sourcefile = "<unknown file>";
129 pp_string (&buf, sourcefile);
131 if (sourceline != 0)
133 pp_colon (&buf);
134 pp_decimal_int (&buf, sourceline);
136 if (sourcecolumn != 0)
138 pp_colon (&buf);
139 pp_decimal_int (&buf, sourcecolumn);
144 if (current_function_decl != NULL_TREE)
146 /* Add (FUNCTION) */
147 pp_string (&buf, " (");
149 const char *funcname = NULL;
150 if (DECL_NAME (current_function_decl))
151 funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
152 if (funcname == NULL)
153 funcname = "anonymous fn";
155 pp_string (&buf, funcname);
157 pp_string (&buf, ") ");
159 else
160 pp_space (&buf);
162 /* Add <variable-declaration>, possibly demangled. */
164 const char *declname = NULL;
166 if (DECL_NAME (decl) != NULL)
168 if (strcmp ("GNU C++", lang_hooks.name) == 0)
170 /* The gcc/cp decl_printable_name hook doesn't do as good a job as
171 the libiberty demangler. */
172 declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
173 DMGL_AUTO | DMGL_VERBOSE);
175 if (declname == NULL)
176 declname = lang_hooks.decl_printable_name (decl, 3);
178 if (declname == NULL)
179 declname = "<unnamed variable>";
181 pp_string (&buf, declname);
184 /* Return the lot as a new STRING_CST. */
185 buf_contents = ggc_strdup (pp_formatted_text (&buf));
186 result = mf_build_string (buf_contents);
187 pp_clear_output_area (&buf);
189 return result;
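/* For illustration (hypothetical example, not something the compiler prints
   on its own): for a local array "buf" declared in function "main" of foo.c
   at line 3, column 7, the string built above would look roughly like

     "foo.c:3:7 (main) buf"

   with the variable name demangled when the GNU C++ front end is in use.
   The exact text depends on the language hooks and on how much location
   information is available.  */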
193 /* And another friend, for producing a simpler message. */
195 static tree
196 mf_file_function_line_tree (location_t location)
198 expanded_location xloc = expand_location (location);
199 const char *file = NULL, *colon, *line, *op, *name, *cp;
200 char linecolbuf[30]; /* Enough for two decimal numbers plus a colon. */
201 char *string;
202 tree result;
204 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
205 file = xloc.file;
206 if (file == NULL && current_function_decl != NULL_TREE)
207 file = DECL_SOURCE_FILE (current_function_decl);
208 if (file == NULL)
209 file = "<unknown file>";
211 if (xloc.line > 0)
213 if (xloc.column > 0)
214 sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
215 else
216 sprintf (linecolbuf, "%d", xloc.line);
217 colon = ":";
218 line = linecolbuf;
220 else
221 colon = line = "";
223 /* Add (FUNCTION). */
224 name = lang_hooks.decl_printable_name (current_function_decl, 1);
225 if (name)
227 op = " (";
228 cp = ")";
230 else
231 op = name = cp = "";
233 string = concat (file, colon, line, op, name, cp, NULL);
234 result = mf_build_string (string);
235 free (string);
237 return result;
241 /* global tree nodes */
243 /* Global tree objects for global variables and functions exported by
244 mudflap runtime library. mf_init_extern_trees must be called
245 before using these. */
247 /* uintptr_t (usually "unsigned long") */
248 static GTY (()) tree mf_uintptr_type;
250 /* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
251 static GTY (()) tree mf_cache_struct_type;
253 /* struct __mf_cache * const */
254 static GTY (()) tree mf_cache_structptr_type;
256 /* extern struct __mf_cache __mf_lookup_cache []; */
257 static GTY (()) tree mf_cache_array_decl;
259 /* extern unsigned char __mf_lc_shift; */
260 static GTY (()) tree mf_cache_shift_decl;
262 /* extern uintptr_t __mf_lc_mask; */
263 static GTY (()) tree mf_cache_mask_decl;
265 /* Their function-scope local shadows, used in single-threaded mode only. */
267 /* auto const unsigned char __mf_lc_shift_l; */
268 static GTY (()) tree mf_cache_shift_decl_l;
270 /* auto const uintptr_t __mf_lc_mask_l; */
271 static GTY (()) tree mf_cache_mask_decl_l;
273 /* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
274 static GTY (()) tree mf_check_fndecl;
276 /* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
277 static GTY (()) tree mf_register_fndecl;
279 /* extern void __mf_unregister (void *ptr, size_t sz, int type); */
280 static GTY (()) tree mf_unregister_fndecl;
282 /* extern void __mf_init (); */
283 static GTY (()) tree mf_init_fndecl;
285 /* extern int __mf_set_options (const char*); */
286 static GTY (()) tree mf_set_options_fndecl;
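/* Taken together, these declarations mirror libmudflap's mf-runtime.h
   interface.  As a sketch (illustrative only; the pass below builds the
   equivalent GIMPLE directly rather than C source), an access to the byte
   range [base, limit] is considered already validated when the hashed
   cache entry covers it:

     struct __mf_cache *elem
       = &__mf_lookup_cache[((uintptr_t) base >> __mf_lc_shift) & __mf_lc_mask];
     if (elem->low > (uintptr_t) base || elem->high < (uintptr_t) limit)
       __mf_check (base, limit - base + 1, dirflag, location_string);

   i.e. only a cache miss calls into the runtime.  */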
289 /* Helper for mudflap_init: construct a decl with the given category,
290 name, and type, mark it an external reference, and pushdecl it. */
291 static inline tree
292 mf_make_builtin (enum tree_code category, const char *name, tree type)
294 tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
295 category, get_identifier (name), type));
296 TREE_PUBLIC (decl) = 1;
297 DECL_EXTERNAL (decl) = 1;
298 lang_hooks.decls.pushdecl (decl);
299 /* The decl was declared by the compiler. */
300 DECL_ARTIFICIAL (decl) = 1;
301 /* And we don't want debug info for it. */
302 DECL_IGNORED_P (decl) = 1;
303 return decl;
306 /* Helper for mudflap_init: construct a tree corresponding to the type
307 struct __mf_cache { uintptr_t low; uintptr_t high; };
308 where uintptr_t is the FIELD_TYPE argument. */
309 static inline tree
310 mf_make_mf_cache_struct_type (tree field_type)
312 /* There is, abominably, no language-independent way to construct a
313 RECORD_TYPE. So we have to call the basic type construction
314 primitives by hand. */
315 tree fieldlo = build_decl (UNKNOWN_LOCATION,
316 FIELD_DECL, get_identifier ("low"), field_type);
317 tree fieldhi = build_decl (UNKNOWN_LOCATION,
318 FIELD_DECL, get_identifier ("high"), field_type);
320 tree struct_type = make_node (RECORD_TYPE);
321 DECL_CONTEXT (fieldlo) = struct_type;
322 DECL_CONTEXT (fieldhi) = struct_type;
323 DECL_CHAIN (fieldlo) = fieldhi;
324 TYPE_FIELDS (struct_type) = fieldlo;
325 TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
326 layout_type (struct_type);
328 return struct_type;
331 /* Initialize the global tree nodes that correspond to mf-runtime.h
332 declarations. */
333 void
334 mudflap_init (void)
336 static bool done = false;
337 tree mf_const_string_type;
338 tree mf_cache_array_type;
339 tree mf_check_register_fntype;
340 tree mf_unregister_fntype;
341 tree mf_init_fntype;
342 tree mf_set_options_fntype;
344 if (done)
345 return;
346 done = true;
348 mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
349 /*unsignedp=*/true);
350 mf_const_string_type
351 = build_pointer_type (build_qualified_type
352 (char_type_node, TYPE_QUAL_CONST));
354 mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
355 mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
356 mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
357 mf_check_register_fntype =
358 build_function_type_list (void_type_node, ptr_type_node, size_type_node,
359 integer_type_node, mf_const_string_type, NULL_TREE);
360 mf_unregister_fntype =
361 build_function_type_list (void_type_node, ptr_type_node, size_type_node,
362 integer_type_node, NULL_TREE);
363 mf_init_fntype =
364 build_function_type_list (void_type_node, NULL_TREE);
365 mf_set_options_fntype =
366 build_function_type_list (integer_type_node, mf_const_string_type, NULL_TREE);
368 mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
369 mf_cache_array_type);
370 mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
371 unsigned_char_type_node);
372 mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
373 mf_uintptr_type);
374 /* Don't process these in mudflap_enqueue_decl, should they come by
375 there for some reason. */
376 mf_mark (mf_cache_array_decl);
377 mf_mark (mf_cache_shift_decl);
378 mf_mark (mf_cache_mask_decl);
379 mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
380 mf_check_register_fntype);
381 mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
382 mf_check_register_fntype);
383 mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
384 mf_unregister_fntype);
385 mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
386 mf_init_fntype);
387 mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
388 mf_set_options_fntype);
392 /* ------------------------------------------------------------------------ */
393 /* This is the second part of the mudflap instrumentation. It works on
394 low-level GIMPLE using the CFG, because we want to run this pass after
395 tree optimizations have been performed, but we have to preserve the CFG
396 for expansion from trees to RTL.
397 Below is the list of transformations performed on statements in the
398 current function.
400 1) Memory reference transforms: Perform the mudflap indirection-related
401 tree transforms on memory references.
 403 2) Mark BUILTIN_ALLOCA calls as not inlinable. */
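/* For example (illustrative): after this pass, a write such as
     *p = 0;
   is preceded by a lookup-cache test of the byte range
   [p, p + sizeof (*p) - 1] and, on a cache miss, a call to __mf_check with
   dirflag 1 (a write).  Reads use dirflag 0 and are skipped entirely when
   flag_mudflap_ignore_reads (-fmudflapir) is set.  */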
407 static unsigned int
408 execute_mudflap_function_ops (void)
410 struct gimplify_ctx gctx;
412 /* Don't instrument functions such as the synthetic constructor
413 built during mudflap_finish_file. */
414 if (mf_marked_p (current_function_decl)
415 || mf_artificial (current_function_decl))
416 return 0;
418 push_gimplify_context (&gctx);
420 /* In multithreaded mode, don't cache the lookup cache parameters. */
421 if (! flag_mudflap_threads)
422 mf_decl_cache_locals ();
424 mf_xform_statements ();
426 if (! flag_mudflap_threads)
427 mf_decl_clear_locals ();
429 pop_gimplify_context (NULL);
430 return 0;
 433 /* Insert a gimple_seq SEQ on all the outgoing edges out of BB. Note that
 434 if BB has more than one outgoing edge, SEQ will be replicated for each edge.
435 Also, abnormal edges will be ignored. */
437 static void
438 insert_edge_copies_seq (gimple_seq seq, basic_block bb)
440 edge e;
441 edge_iterator ei;
442 unsigned n_copies = -1;
444 FOR_EACH_EDGE (e, ei, bb->succs)
445 if (!(e->flags & EDGE_ABNORMAL))
446 n_copies++;
448 FOR_EACH_EDGE (e, ei, bb->succs)
449 if (!(e->flags & EDGE_ABNORMAL))
450 gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
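/* Note on the counting above (reasoning only, no behaviour change):
   n_copies starts at -1, so after the first loop it equals the number of
   normal outgoing edges minus one.  The second loop therefore hands out
   that many fresh copies of SEQ and lets the last normal edge reuse SEQ
   itself, saving one gimple_seq_copy.  */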
453 /* Create and initialize local shadow variables for the lookup cache
454 globals. Put their decls in the *_l globals for use by
455 mf_build_check_statement_for. */
457 static void
458 mf_decl_cache_locals (void)
460 gimple g;
461 gimple_seq seq = NULL;
463 /* Build the cache vars. */
464 mf_cache_shift_decl_l
465 = mf_mark (create_tmp_reg (TREE_TYPE (mf_cache_shift_decl),
466 "__mf_lookup_shift_l"));
468 mf_cache_mask_decl_l
469 = mf_mark (create_tmp_reg (TREE_TYPE (mf_cache_mask_decl),
470 "__mf_lookup_mask_l"));
472 /* Build initialization nodes for the cache vars. We just load the
473 globals into the cache variables. */
474 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
475 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
476 gimple_seq_add_stmt (&seq, g);
478 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
479 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
480 gimple_seq_add_stmt (&seq, g);
482 insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
484 gsi_commit_edge_inserts ();
488 static void
489 mf_decl_clear_locals (void)
491 /* Unset local shadows. */
492 mf_cache_shift_decl_l = NULL_TREE;
493 mf_cache_mask_decl_l = NULL_TREE;
496 static void
497 mf_build_check_statement_for (tree base, tree limit,
498 gimple_stmt_iterator *instr_gsi,
499 location_t location, tree dirflag)
501 gimple_stmt_iterator gsi;
502 basic_block cond_bb, then_bb, join_bb;
503 edge e;
504 tree cond, t, u, v;
505 tree mf_base;
506 tree mf_elem;
507 tree mf_limit;
508 gimple g;
509 gimple_seq seq, stmts;
511 /* We first need to split the current basic block, and start altering
512 the CFG. This allows us to insert the statements we're about to
513 construct into the right basic blocks. */
515 cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
516 gsi = *instr_gsi;
517 gsi_prev (&gsi);
518 if (! gsi_end_p (gsi))
519 e = split_block (cond_bb, gsi_stmt (gsi));
520 else
521 e = split_block_after_labels (cond_bb);
522 cond_bb = e->src;
523 join_bb = e->dest;
525 /* A recap at this point: join_bb is the basic block at whose head
526 is the gimple statement for which this check expression is being
527 built. cond_bb is the (possibly new, synthetic) basic block the
528 end of which will contain the cache-lookup code, and a
529 conditional that jumps to the cache-miss code or, much more
530 likely, over to join_bb. */
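/* Sketch of the control flow being constructed (illustrative):

     cond_bb:  ... preceding statements ...
               cache lookup for [base, limit]
               if (miss) goto then_bb; else goto join_bb;
     then_bb:  __mf_check (...); possibly refresh the *_l shadow variables
               fall through to join_bb
     join_bb:  <the statement being instrumented> ...                        */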
532 /* Create the bb that contains the cache-miss fallback block (mf_check). */
533 then_bb = create_empty_bb (cond_bb);
534 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
535 make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);
537 /* Mark the pseudo-fallthrough edge from cond_bb to join_bb. */
538 e = find_edge (cond_bb, join_bb);
539 e->flags = EDGE_FALSE_VALUE;
540 e->count = cond_bb->count;
541 e->probability = REG_BR_PROB_BASE;
 543 /* Update dominance info. Note that join_bb's data was
544 updated by split_block. */
545 if (dom_info_available_p (CDI_DOMINATORS))
547 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
548 set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
551 /* Update loop info. */
552 if (current_loops)
553 add_bb_to_loop (then_bb, cond_bb->loop_father);
555 /* Build our local variables. */
556 mf_elem = create_tmp_reg (mf_cache_structptr_type, "__mf_elem");
557 mf_base = create_tmp_reg (mf_uintptr_type, "__mf_base");
558 mf_limit = create_tmp_reg (mf_uintptr_type, "__mf_limit");
560 /* Build: __mf_base = (uintptr_t) <base address expression>. */
561 seq = NULL;
562 t = fold_convert_loc (location, mf_uintptr_type,
563 unshare_expr (base));
564 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
565 gimple_seq_add_seq (&seq, stmts);
566 g = gimple_build_assign (mf_base, t);
567 gimple_set_location (g, location);
568 gimple_seq_add_stmt (&seq, g);
570 /* Build: __mf_limit = (uintptr_t) <limit address expression>. */
571 t = fold_convert_loc (location, mf_uintptr_type,
572 unshare_expr (limit));
573 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
574 gimple_seq_add_seq (&seq, stmts);
575 g = gimple_build_assign (mf_limit, t);
576 gimple_set_location (g, location);
577 gimple_seq_add_stmt (&seq, g);
579 /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
580 & __mf_mask]. */
581 t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
582 flag_mudflap_threads ? mf_cache_shift_decl
583 : mf_cache_shift_decl_l);
584 t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
585 flag_mudflap_threads ? mf_cache_mask_decl
586 : mf_cache_mask_decl_l);
587 t = build4 (ARRAY_REF,
588 TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
589 mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
590 t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
591 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
592 gimple_seq_add_seq (&seq, stmts);
593 g = gimple_build_assign (mf_elem, t);
594 gimple_set_location (g, location);
595 gimple_seq_add_stmt (&seq, g);
597 /* Quick validity check.
599 if (__mf_elem->low > __mf_base
 600 || __mf_elem->high < __mf_limit)
 602 __mf_check ();
 603 ... and only if single-threaded:
 604 __mf_lookup_shift_l = ...;
 605 __mf_lookup_mask_l = ...;
608 It is expected that this body of code is rarely executed so we mark
609 the edge to the THEN clause of the conditional jump as unlikely. */
611 /* Construct t <-- '__mf_elem->low > __mf_base'. */
612 t = build3 (COMPONENT_REF, mf_uintptr_type,
613 build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
614 TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
615 t = build2 (GT_EXPR, boolean_type_node, t, mf_base);
617 /* Construct '__mf_elem->high < __mf_limit'.
619 First build:
620 1) u <-- '__mf_elem->high'
621 2) v <-- '__mf_limit'.
 623 Then build 'u <-- (u < v)'. */
625 u = build3 (COMPONENT_REF, mf_uintptr_type,
626 build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
627 DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);
629 v = mf_limit;
631 u = build2 (LT_EXPR, boolean_type_node, u, v);
633 /* Build the composed conditional: t <-- 't || u'. Then store the
634 result of the evaluation of 't' in a temporary variable which we
635 can use as the condition for the conditional jump. */
636 t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
637 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
638 gimple_seq_add_seq (&seq, stmts);
639 cond = create_tmp_reg (boolean_type_node, "__mf_unlikely_cond");
640 g = gimple_build_assign (cond, t);
641 gimple_set_location (g, location);
642 gimple_seq_add_stmt (&seq, g);
644 /* Build the conditional jump. 'cond' is just a temporary so we can
645 simply build a void COND_EXPR. We do need labels in both arms though. */
646 g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
647 NULL_TREE);
648 gimple_set_location (g, location);
649 gimple_seq_add_stmt (&seq, g);
651 /* At this point, after so much hard work, we have only constructed
652 the conditional jump,
654 if (__mf_elem->low > __mf_base
 655 || __mf_elem->high < __mf_limit)
 657 The lowered GIMPLE statements representing this code are in the
 658 sequence accumulated in 'seq'.
660 We can insert this now in the current basic block, i.e. the one that
661 the statement we're instrumenting was originally in. */
662 gsi = gsi_last_bb (cond_bb);
663 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
665 /* Now build up the body of the cache-miss handling:
667 __mf_check();
668 refresh *_l vars.
670 This is the body of the conditional. */
672 seq = NULL;
673 /* u is a string, so it is already a gimple value. */
674 u = mf_file_function_line_tree (location);
675 /* NB: we pass the overall [base..limit] range to mf_check. */
676 v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
677 fold_build2_loc (location,
678 MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
679 build_int_cst (mf_uintptr_type, 1));
680 v = force_gimple_operand (v, &stmts, true, NULL_TREE);
681 gimple_seq_add_seq (&seq, stmts);
682 g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
683 gimple_seq_add_stmt (&seq, g);
685 if (! flag_mudflap_threads)
687 if (stmt_ends_bb_p (g))
689 gsi = gsi_start_bb (then_bb);
690 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
691 e = split_block (then_bb, g);
692 then_bb = e->dest;
693 seq = NULL;
696 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
697 gimple_seq_add_stmt (&seq, g);
699 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
700 gimple_seq_add_stmt (&seq, g);
703 /* Insert the check code in the THEN block. */
704 gsi = gsi_start_bb (then_bb);
705 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
707 *instr_gsi = gsi_start_bb (join_bb);
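/* End-to-end example of the code built above (illustrative; the pass emits
   GIMPLE, not C).  For a 4-byte write "*p = 0", mf_xform_derefs_1 passes
   base = p and limit = p + 3, and the result behaves like:

     __mf_base  = (uintptr_t) p;
     __mf_limit = (uintptr_t) (p + 3);
     __mf_elem  = &__mf_lookup_cache[(__mf_base >> shift) & mask];
     if (__mf_elem->low > __mf_base || __mf_elem->high < __mf_limit)
       {
         __mf_check (__mf_base, 4, 1, "file:line (fn)");   1 == write access
         ... refresh __mf_lookup_shift_l / __mf_lookup_mask_l if
             single-threaded ...
       }
     *p = 0;      <- the original statement, now at the head of join_bb

   where "shift" and "mask" are the globals or their local shadows,
   depending on flag_mudflap_threads.  */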
711 /* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
712 eligible for instrumentation. For the mudflap1 pass, this implies
713 that it should be registered with the libmudflap runtime. For the
714 mudflap2 pass this means instrumenting an indirection operation with
 715 respect to the object. */
717 static int
718 mf_decl_eligible_p (tree decl)
720 return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
721 /* The decl must have its address taken. In the case of
722 arrays, this flag is also set if the indexes are not
723 compile-time known valid constants. */
724 /* XXX: not sufficient: return-by-value structs! */
725 && TREE_ADDRESSABLE (decl)
726 /* The type of the variable must be complete. */
727 && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
728 /* The decl hasn't been decomposed somehow. */
729 && !DECL_HAS_VALUE_EXPR_P (decl));
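/* Examples (illustrative): a local "char buf[16]" whose address escapes via
   "foo (buf)" is TREE_ADDRESSABLE and therefore eligible; a scalar "int i"
   that never has its address taken is not, and neither is a decl that has
   been rewritten in terms of another expression (DECL_HAS_VALUE_EXPR_P) or
   whose type is incomplete.  */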
733 static void
734 mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
735 location_t location, tree dirflag)
737 tree type, base, limit, addr, size, t;
739 /* Don't instrument read operations. */
740 if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
741 return;
743 /* Don't instrument marked nodes. */
744 if (mf_marked_p (*tp))
745 return;
747 t = *tp;
748 type = TREE_TYPE (t);
750 if (type == error_mark_node)
751 return;
753 size = TYPE_SIZE_UNIT (type);
755 switch (TREE_CODE (t))
757 case ARRAY_REF:
758 case COMPONENT_REF:
760 /* This is trickier than it may first appear. The reason is
761 that we are looking at expressions from the "inside out" at
762 this point. We may have a complex nested aggregate/array
763 expression (e.g. "a.b[i].c"), maybe with an indirection as
764 the leftmost operator ("p->a.b.d"), where instrumentation
765 is necessary. Or we may have an innocent "a.b.c"
766 expression that must not be instrumented. We need to
767 recurse all the way down the nesting structure to figure it
768 out: looking just at the outer node is not enough. */
769 tree var;
770 int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
771 /* If we have a bitfield component reference, we must note the
772 innermost addressable object in ELT, from which we will
773 construct the byte-addressable bounds of the bitfield. */
774 tree elt = NULL_TREE;
775 int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
776 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));
778 /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
779 containment hierarchy to find the outermost VAR_DECL. */
780 var = TREE_OPERAND (t, 0);
781 while (1)
783 if (bitfield_ref_p && elt == NULL_TREE
784 && (TREE_CODE (var) == ARRAY_REF
785 || TREE_CODE (var) == COMPONENT_REF))
786 elt = var;
788 if (TREE_CODE (var) == ARRAY_REF)
790 component_ref_only = 0;
791 var = TREE_OPERAND (var, 0);
793 else if (TREE_CODE (var) == COMPONENT_REF)
794 var = TREE_OPERAND (var, 0);
795 else if (INDIRECT_REF_P (var)
796 || TREE_CODE (var) == MEM_REF)
798 base = TREE_OPERAND (var, 0);
799 break;
801 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
803 var = TREE_OPERAND (var, 0);
804 if (CONSTANT_CLASS_P (var)
805 && TREE_CODE (var) != STRING_CST)
806 return;
808 else
810 gcc_assert (TREE_CODE (var) == VAR_DECL
811 || TREE_CODE (var) == PARM_DECL
812 || TREE_CODE (var) == RESULT_DECL
813 || TREE_CODE (var) == STRING_CST);
814 /* Don't instrument this access if the underlying
815 variable is not "eligible". This test matches
816 those arrays that have only known-valid indexes,
817 and thus are not labeled TREE_ADDRESSABLE. */
818 if (! mf_decl_eligible_p (var) || component_ref_only)
819 return;
820 else
822 base = build1 (ADDR_EXPR,
823 build_pointer_type (TREE_TYPE (var)), var);
824 break;
829 /* Handle the case of ordinary non-indirection structure
830 accesses. These have only nested COMPONENT_REF nodes (no
831 INDIRECT_REF), but pass through the above filter loop.
832 Note that it's possible for such a struct variable to match
833 the eligible_p test because someone else might take its
834 address sometime. */
836 /* We need special processing for bitfield components, because
837 their addresses cannot be taken. */
838 if (bitfield_ref_p)
840 tree field = TREE_OPERAND (t, 1);
842 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
843 size = DECL_SIZE_UNIT (field);
845 if (elt)
846 elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
847 elt);
848 addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
849 addr = fold_build_pointer_plus_loc (location,
850 addr, byte_position (field));
852 else
853 addr = build1 (ADDR_EXPR, build_pointer_type (type), t);
855 limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
856 fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
857 fold_convert (mf_uintptr_type, addr),
858 size),
859 integer_one_node);
861 break;
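/* Worked example of the limit computation above (illustrative): for a
   4-byte field whose address ADDR is A, limit = (A + 4) - 1 = A + 3, so
   the checked range ends at the last byte of the field rather than one
   past it.  */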
863 case INDIRECT_REF:
864 addr = TREE_OPERAND (t, 0);
865 base = addr;
866 limit = fold_build_pointer_plus_hwi_loc
867 (location, fold_build_pointer_plus_loc (location, base, size), -1);
868 break;
870 case MEM_REF:
871 if (addr_expr_of_non_mem_decl_p (TREE_OPERAND (t, 0)))
872 return;
874 addr = fold_build_pointer_plus_loc (location, TREE_OPERAND (t, 0),
875 TREE_OPERAND (t, 1));
876 base = addr;
877 limit = fold_build_pointer_plus_hwi_loc (location,
878 fold_build_pointer_plus_loc (location,
879 base, size), -1);
880 break;
882 case TARGET_MEM_REF:
883 if (addr_expr_of_non_mem_decl_p (TMR_BASE (t)))
884 return;
886 addr = tree_mem_ref_addr (ptr_type_node, t);
887 base = addr;
888 limit = fold_build_pointer_plus_hwi_loc (location,
889 fold_build_pointer_plus_loc (location,
890 base, size), -1);
891 break;
893 case ARRAY_RANGE_REF:
894 warning (OPT_Wmudflap,
895 "mudflap checking not yet implemented for ARRAY_RANGE_REF");
896 return;
898 case BIT_FIELD_REF:
899 /* ??? merge with COMPONENT_REF code above? */
901 tree ofs, rem, bpu;
903 /* If we're not dereferencing something, then the access
904 must be ok. */
905 if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
906 return;
908 bpu = bitsize_int (BITS_PER_UNIT);
909 ofs = fold_convert (bitsizetype, TREE_OPERAND (t, 2));
910 rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
911 ofs = size_binop_loc (location, TRUNC_DIV_EXPR, ofs, bpu);
913 size = fold_convert (bitsizetype, TREE_OPERAND (t, 1));
914 size = size_binop_loc (location, PLUS_EXPR, size, rem);
915 size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
916 size = fold_convert (sizetype, size);
918 addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
919 addr = fold_convert (ptr_type_node, addr);
920 addr = fold_build_pointer_plus_loc (location, addr, ofs);
922 base = addr;
923 limit = fold_build_pointer_plus_hwi_loc (location,
924 fold_build_pointer_plus_loc (location,
925 base, size), -1);
927 break;
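/* Worked example of the arithmetic above (illustrative, BITS_PER_UNIT == 8):
   for a 10-bit field at bit offset 3 from the dereferenced pointer,
     ofs = 3 / 8 = 0,   rem = 3 % 8 = 3,
     size = ceil ((10 + 3) / 8) = 2 bytes,
   so the check covers the two bytes starting at the pointed-to address.  */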
929 default:
930 return;
933 mf_build_check_statement_for (base, limit, iter, location, dirflag);
935 /* Transform
 936 1) Memory references. */
938 static void
939 mf_xform_statements (void)
941 basic_block bb, next;
942 gimple_stmt_iterator i;
943 int saved_last_basic_block = last_basic_block;
944 enum gimple_rhs_class grhs_class;
946 bb = ENTRY_BLOCK_PTR ->next_bb;
949 next = bb->next_bb;
950 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
952 gimple s = gsi_stmt (i);
954 /* Only a few GIMPLE statements can reference memory. */
955 switch (gimple_code (s))
957 case GIMPLE_ASSIGN:
958 mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
959 gimple_location (s), integer_one_node);
960 mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
961 gimple_location (s), integer_zero_node);
962 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
963 if (grhs_class == GIMPLE_BINARY_RHS)
964 mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
965 gimple_location (s), integer_zero_node);
966 break;
968 case GIMPLE_RETURN:
969 if (gimple_return_retval (s) != NULL_TREE)
971 mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
972 gimple_location (s),
973 integer_zero_node);
975 break;
977 default:
981 bb = next;
983 while (bb && bb->index <= saved_last_basic_block);
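/* Note (reasoning only): the traversal stops once bb->index exceeds
   saved_last_basic_block because the blocks created by
   mf_build_check_statement_for while splitting receive higher indices;
   skipping them keeps the pass from instrumenting its own check code.  */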
986 /* ------------------------------------------------------------------------ */
987 /* ADDR_EXPR transforms. Perform the declaration-related mudflap tree
988 transforms on the current function.
990 This is the first part of the mudflap instrumentation. It works on
991 high-level GIMPLE because after lowering, all variables are moved out
992 of their BIND_EXPR binding context, and we lose liveness information
993 for the declarations we wish to instrument. */
995 static unsigned int
996 execute_mudflap_function_decls (void)
998 struct gimplify_ctx gctx;
1000 /* Don't instrument functions such as the synthetic constructor
1001 built during mudflap_finish_file. */
1002 if (mf_marked_p (current_function_decl)
1003 || mf_artificial (current_function_decl))
1004 return 0;
1006 push_gimplify_context (&gctx);
1008 mf_xform_decls (gimple_body (current_function_decl),
1009 DECL_ARGUMENTS (current_function_decl));
1011 pop_gimplify_context (NULL);
1012 return 0;
 1015 /* This struct is passed from mf_xform_decls to the statement walker
 1016 to carry state during the traversal that searches for objects that
 1017 have their addresses taken. */
1018 struct mf_xform_decls_data
1020 tree param_decls;
 1024 /* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR for this chain of
1025 _DECLs if appropriate. Arrange to call the __mf_register function
1026 now, and the __mf_unregister function later for each. Return the
1027 gimple sequence after synthesis. */
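/* Roughly (illustrative; the pass emits GIMPLE, not C), a binding that
   declares an eligible automatic variable X ends up shaped like:

     __mf_register (&X, sizeof (X), __MF_TYPE_STACK, "file:line (fn) X");
     try
       {
         ... original body of the binding ...
       }
     finally
       {
         __mf_unregister (&X, sizeof (X), __MF_TYPE_STACK);
       }

   so the object is unregistered on every path out of its scope.  */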
1028 gimple_seq
1029 mx_register_decls (tree decl, gimple_seq seq, location_t location)
1031 gimple_seq finally_stmts = NULL;
1032 gimple_stmt_iterator initially_stmts = gsi_start (seq);
1034 while (decl != NULL_TREE)
1036 if (mf_decl_eligible_p (decl)
1037 /* Not already processed. */
1038 && ! mf_marked_p (decl)
1039 /* Automatic variable. */
1040 && ! DECL_EXTERNAL (decl)
1041 && ! TREE_STATIC (decl))
1043 tree size = NULL_TREE, variable_name;
1044 gimple unregister_fncall, register_fncall;
1045 tree unregister_fncall_param, register_fncall_param;
 1047 /* Variable-sized objects should already have had their sizes
 1048 gimplified by the time we get here. */
1049 size = fold_convert (size_type_node,
1050 TYPE_SIZE_UNIT (TREE_TYPE (decl)));
1051 gcc_assert (is_gimple_val (size));
1054 unregister_fncall_param =
1055 mf_mark (build1 (ADDR_EXPR,
1056 build_pointer_type (TREE_TYPE (decl)),
1057 decl));
1058 /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
1059 unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
1060 unregister_fncall_param,
1061 size,
1062 integer_three_node);
1065 variable_name = mf_varname_tree (decl);
1066 register_fncall_param =
1067 mf_mark (build1 (ADDR_EXPR,
1068 build_pointer_type (TREE_TYPE (decl)),
1069 decl));
1070 /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
1071 "name") */
1072 register_fncall = gimple_build_call (mf_register_fndecl, 4,
1073 register_fncall_param,
1074 size,
1075 integer_three_node,
1076 variable_name);
1079 /* Accumulate the two calls. */
1080 gimple_set_location (register_fncall, location);
1081 gimple_set_location (unregister_fncall, location);
1083 /* Add the __mf_register call at the current appending point. */
1084 if (gsi_end_p (initially_stmts))
1086 if (!mf_artificial (decl))
1087 warning (OPT_Wmudflap,
1088 "mudflap cannot track %qE in stub function",
1089 DECL_NAME (decl));
1091 else
1093 gsi_insert_before (&initially_stmts, register_fncall,
1094 GSI_SAME_STMT);
1096 /* Accumulate the FINALLY piece. */
1097 gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
1099 mf_mark (decl);
1102 decl = DECL_CHAIN (decl);
1105 /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
1106 if (finally_stmts != NULL)
1108 gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
1109 gimple_seq new_seq = NULL;
1111 gimple_seq_add_stmt (&new_seq, stmt);
1112 return new_seq;
1114 else
1115 return seq;
1119 /* Process every variable mentioned in BIND_EXPRs. */
1120 static tree
1121 mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
1122 bool *handled_operands_p ATTRIBUTE_UNUSED,
1123 struct walk_stmt_info *wi)
1125 struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
1126 gimple stmt = gsi_stmt (*gsi);
1128 switch (gimple_code (stmt))
1130 case GIMPLE_BIND:
1132 /* Process function parameters now (but only once). */
1133 if (d->param_decls)
1135 gimple_bind_set_body (stmt,
1136 mx_register_decls (d->param_decls,
1137 gimple_bind_body (stmt),
1138 gimple_location (stmt)));
1139 d->param_decls = NULL_TREE;
1142 gimple_bind_set_body (stmt,
1143 mx_register_decls (gimple_bind_vars (stmt),
1144 gimple_bind_body (stmt),
1145 gimple_location (stmt)));
1147 break;
1149 default:
1150 break;
1153 return NULL_TREE;
1156 /* Perform the object lifetime tracking mudflap transform on the given function
1157 tree. The tree is mutated in place, with possibly copied subtree nodes.
1159 For every auto variable declared, if its address is ever taken
1160 within the function, then supply its lifetime to the mudflap
 1161 runtime with the __mf_register and __mf_unregister calls. */
1164 static void
1165 mf_xform_decls (gimple_seq fnbody, tree fnparams)
1167 struct mf_xform_decls_data d;
1168 struct walk_stmt_info wi;
1169 struct pointer_set_t *pset = pointer_set_create ();
1171 d.param_decls = fnparams;
1172 memset (&wi, 0, sizeof (wi));
1173 wi.info = (void*) &d;
1174 wi.pset = pset;
1175 walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
1176 pointer_set_destroy (pset);
1180 /* ------------------------------------------------------------------------ */
1181 /* Externally visible mudflap functions. */
1184 /* Mark and return the given tree node to prevent further mudflap
1185 transforms. */
1186 static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;
1188 tree
1189 mf_mark (tree t)
1191 void **slot;
1193 if (marked_trees == NULL)
1194 marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
1195 NULL);
1197 slot = htab_find_slot (marked_trees, t, INSERT);
1198 *slot = t;
1199 return t;
 1202 int
 1203 mf_marked_p (tree t)
1205 void *entry;
1207 if (marked_trees == NULL)
1208 return 0;
1210 entry = htab_find (marked_trees, t);
1211 return (entry != NULL);
1214 /* Remember given node as a static of some kind: global data,
1215 function-scope static, or an anonymous constant. Its assembler
1216 label is given. */
1218 /* A list of globals whose incomplete declarations we encountered.
1219 Instead of emitting the __mf_register call for them here, it's
1220 delayed until program finish time. If they're still incomplete by
1221 then, warnings are emitted. */
1223 static GTY (()) vec<tree, va_gc> *deferred_static_decls;
1225 /* A list of statements for calling __mf_register() at startup time. */
1226 static GTY (()) tree enqueued_call_stmt_chain;
1228 static void
1229 mudflap_register_call (tree obj, tree object_size, tree varname)
1231 tree arg, call_stmt;
1233 arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
1234 arg = fold_convert (ptr_type_node, arg);
1236 call_stmt = build_call_expr (mf_register_fndecl, 4,
1237 arg,
1238 fold_convert (size_type_node, object_size),
1239 /* __MF_TYPE_STATIC */
1240 build_int_cst (integer_type_node, 4),
1241 varname);
1243 append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
1246 void
1247 mudflap_enqueue_decl (tree obj)
1249 if (mf_marked_p (obj))
1250 return;
1252 /* We don't need to process variable decls that are internally
1253 generated extern. If we did, we'd end up with warnings for them
1254 during mudflap_finish_file (). That would confuse the user,
1255 since the text would refer to variables that don't show up in the
1256 user's source code. */
1257 if (DECL_P (obj) && DECL_EXTERNAL (obj) && mf_artificial (obj))
1258 return;
1260 vec_safe_push (deferred_static_decls, obj);
1264 void
1265 mudflap_enqueue_constant (tree obj)
1267 tree object_size, varname;
1269 if (mf_marked_p (obj))
1270 return;
1272 if (TREE_CODE (obj) == STRING_CST)
1273 object_size = size_int (TREE_STRING_LENGTH (obj));
1274 else
1275 object_size = size_in_bytes (TREE_TYPE (obj));
1277 if (TREE_CODE (obj) == STRING_CST)
1278 varname = mf_build_string ("string literal");
1279 else
1280 varname = mf_build_string ("constant");
1282 mudflap_register_call (obj, object_size, varname);
1286 /* Emit any file-wide instrumentation. */
1287 void
1288 mudflap_finish_file (void)
1290 tree ctor_statements = NULL_TREE;
1292 /* No need to continue when there were errors. */
1293 if (seen_error ())
1294 return;
1296 /* Insert a call to __mf_init. */
1298 tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
1299 append_to_statement_list (call2_stmt, &ctor_statements);
1302 /* If appropriate, call __mf_set_options to pass along read-ignore mode. */
1303 if (flag_mudflap_ignore_reads)
1305 tree arg = mf_build_string ("-ignore-reads");
1306 tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
1307 append_to_statement_list (call_stmt, &ctor_statements);
1310 /* Process all enqueued object decls. */
1311 if (deferred_static_decls)
1313 size_t i;
1314 tree obj;
1315 FOR_EACH_VEC_ELT (*deferred_static_decls, i, obj)
1317 gcc_assert (DECL_P (obj));
1319 if (mf_marked_p (obj))
1320 continue;
1322 /* Omit registration for static unaddressed objects. NB:
1323 Perform registration for non-static objects regardless of
1324 TREE_USED or TREE_ADDRESSABLE, because they may be used
1325 from other compilation units. */
1326 if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
1327 continue;
1329 /* If we're neither emitting nor referencing the symbol,
1330 don't register it. We have to register external symbols
1331 if they happen to be in other files not compiled with
1332 mudflap (say system libraries), and we must not register
1333 internal symbols that we don't emit or they'll become
1334 dangling references or force symbols to be emitted that
1335 didn't have to. */
1336 if (!symtab_get_node (obj))
1337 continue;
1339 if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
1341 warning (OPT_Wmudflap,
1342 "mudflap cannot track unknown size extern %qE",
1343 DECL_NAME (obj));
1344 continue;
1347 mudflap_register_call (obj,
1348 size_in_bytes (TREE_TYPE (obj)),
1349 mf_varname_tree (obj));
1352 deferred_static_decls->truncate (0);
1355 /* Append all the enqueued registration calls. */
1356 if (enqueued_call_stmt_chain)
1358 append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
1359 enqueued_call_stmt_chain = NULL_TREE;
1362 cgraph_build_static_cdtor ('I', ctor_statements,
1363 MAX_RESERVED_INIT_PRIORITY-1);
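/* Net effect (sketch; the constructor is synthesized by
   cgraph_build_static_cdtor and its name is compiler-internal): the
   translation unit gains a static initializer, run at priority
   MAX_RESERVED_INIT_PRIORITY-1, that does approximately

     __mf_init ();
     __mf_set_options ("-ignore-reads");            only with -fmudflapir
     __mf_register (&obj, sizeof (obj), 4, "...");  for each tracked static
     ... plus the calls queued by mudflap_enqueue_constant ...                */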
1367 static bool
1368 gate_mudflap (void)
1370 return flag_mudflap != 0;
1373 namespace {
1375 const pass_data pass_data_mudflap_1 =
1377 GIMPLE_PASS, /* type */
1378 "mudflap1", /* name */
1379 OPTGROUP_NONE, /* optinfo_flags */
1380 true, /* has_gate */
1381 true, /* has_execute */
1382 TV_NONE, /* tv_id */
1383 PROP_gimple_any, /* properties_required */
1384 0, /* properties_provided */
1385 0, /* properties_destroyed */
1386 0, /* todo_flags_start */
1387 0, /* todo_flags_finish */
1390 class pass_mudflap_1 : public gimple_opt_pass
1392 public:
1393 pass_mudflap_1 (gcc::context *ctxt)
1394 : gimple_opt_pass (pass_data_mudflap_1, ctxt)
1397 /* opt_pass methods: */
1398 bool gate () { return gate_mudflap (); }
1399 unsigned int execute () { return execute_mudflap_function_decls (); }
1401 }; // class pass_mudflap_1
1403 } // anon namespace
1405 gimple_opt_pass *
1406 make_pass_mudflap_1 (gcc::context *ctxt)
1408 return new pass_mudflap_1 (ctxt);
1411 namespace {
1413 const pass_data pass_data_mudflap_2 =
1415 GIMPLE_PASS, /* type */
1416 "mudflap2", /* name */
1417 OPTGROUP_NONE, /* optinfo_flags */
1418 true, /* has_gate */
1419 true, /* has_execute */
1420 TV_NONE, /* tv_id */
1421 ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
1422 0, /* properties_provided */
1423 0, /* properties_destroyed */
1424 0, /* todo_flags_start */
1425 ( TODO_verify_flow | TODO_verify_stmts
1426 | TODO_update_ssa ), /* todo_flags_finish */
1429 class pass_mudflap_2 : public gimple_opt_pass
1431 public:
1432 pass_mudflap_2 (gcc::context *ctxt)
1433 : gimple_opt_pass (pass_data_mudflap_2, ctxt)
1436 /* opt_pass methods: */
1437 bool gate () { return gate_mudflap (); }
1438 unsigned int execute () { return execute_mudflap_function_ops (); }
1440 }; // class pass_mudflap_2
1442 } // anon namespace
1444 gimple_opt_pass *
1445 make_pass_mudflap_2 (gcc::context *ctxt)
1447 return new pass_mudflap_2 (ctxt);
1450 #include "gt-tree-mudflap.h"