1 /* Mudflap: narrow-pointer bounds-checking by tree rewriting.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2012
3 Free Software Foundation, Inc.
4 Contributed by Frank Ch. Eigler <fche@redhat.com>
5 and Graydon Hoare <graydon@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "tm_p.h"
30 #include "basic-block.h"
31 #include "flags.h"
32 #include "function.h"
33 #include "tree-inline.h"
34 #include "gimple.h"
35 #include "tree-iterator.h"
36 #include "tree-flow.h"
37 #include "tree-mudflap.h"
38 #include "tree-dump.h"
39 #include "tree-pass.h"
40 #include "hashtab.h"
41 #include "diagnostic.h"
42 #include "demangle.h"
43 #include "langhooks.h"
44 #include "ggc.h"
45 #include "cgraph.h"
46 #include "gimple.h"
48 /* Internal function decls */
51 /* Options. */
52 #define flag_mudflap_threads (flag_mudflap == 2)
54 /* Helpers. */
55 static tree mf_build_string (const char *string);
56 static tree mf_varname_tree (tree);
57 static tree mf_file_function_line_tree (location_t);
59 /* Indirection-related instrumentation. */
60 static void mf_decl_cache_locals (void);
61 static void mf_decl_clear_locals (void);
62 static void mf_xform_statements (void);
63 static unsigned int execute_mudflap_function_ops (void);
65 /* Addressable variables instrumentation. */
66 static void mf_xform_decls (gimple_seq, tree);
67 static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
68 struct walk_stmt_info *);
69 static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
70 static unsigned int execute_mudflap_function_decls (void);
72 /* Return true if DECL is an artificial stub that should not be instrumented
73 by mudflap. We should still instrument clones of non-artificial functions. */
74 static inline bool
75 mf_artificial (const_tree decl)
77 return DECL_ARTIFICIAL (DECL_ORIGIN (decl));
80 /* ------------------------------------------------------------------------ */
81 /* Some generally helpful functions for mudflap instrumentation. */
83 /* Build a reference to a literal string. */
84 static tree
85 mf_build_string (const char *string)
87 size_t len = strlen (string);
88 tree result = mf_mark (build_string (len + 1, string));
90 TREE_TYPE (result) = build_array_type
91 (char_type_node, build_index_type (size_int (len)));
92 TREE_CONSTANT (result) = 1;
93 TREE_READONLY (result) = 1;
94 TREE_STATIC (result) = 1;
96 result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);
98 return mf_mark (result);
101 /* Create a properly typed STRING_CST node that describes the given
102 declaration. It will be used as an argument for __mf_register().
103 Try to construct a helpful string, including file/function/variable
104 name. */
106 static tree
107 mf_varname_tree (tree decl)
109 static pretty_printer buf_rec;
110 static int initialized = 0;
111 pretty_printer *buf = & buf_rec;
112 const char *buf_contents;
113 tree result;
115 gcc_assert (decl);
117 if (!initialized)
119 pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
120 initialized = 1;
122 pp_clear_output_area (buf);
124 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
126 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
127 const char *sourcefile;
128 unsigned sourceline = xloc.line;
129 unsigned sourcecolumn = 0;
130 sourcecolumn = xloc.column;
131 sourcefile = xloc.file;
132 if (sourcefile == NULL && current_function_decl != NULL_TREE)
133 sourcefile = DECL_SOURCE_FILE (current_function_decl);
134 if (sourcefile == NULL)
135 sourcefile = "<unknown file>";
137 pp_string (buf, sourcefile);
139 if (sourceline != 0)
141 pp_string (buf, ":");
142 pp_decimal_int (buf, sourceline);
144 if (sourcecolumn != 0)
146 pp_string (buf, ":");
147 pp_decimal_int (buf, sourcecolumn);
152 if (current_function_decl != NULL_TREE)
154 /* Add (FUNCTION) */
155 pp_string (buf, " (");
157 const char *funcname = NULL;
158 if (DECL_NAME (current_function_decl))
159 funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
160 if (funcname == NULL)
161 funcname = "anonymous fn";
163 pp_string (buf, funcname);
165 pp_string (buf, ") ");
167 else
168 pp_string (buf, " ");
170 /* Add <variable-declaration>, possibly demangled. */
172 const char *declname = NULL;
174 if (DECL_NAME (decl) != NULL)
176 if (strcmp ("GNU C++", lang_hooks.name) == 0)
178 /* The gcc/cp decl_printable_name hook doesn't do as good a job as
179 the libiberty demangler. */
180 declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
181 DMGL_AUTO | DMGL_VERBOSE);
183 if (declname == NULL)
184 declname = lang_hooks.decl_printable_name (decl, 3);
186 if (declname == NULL)
187 declname = "<unnamed variable>";
189 pp_string (buf, declname);
192 /* Return the lot as a new STRING_CST. */
193 buf_contents = pp_base_formatted_text (buf);
194 result = mf_build_string (buf_contents);
195 pp_clear_output_area (buf);
197 return result;
201 /* And another friend, for producing a simpler message. */
203 static tree
204 mf_file_function_line_tree (location_t location)
206 expanded_location xloc = expand_location (location);
207 const char *file = NULL, *colon, *line, *op, *name, *cp;
208 char linecolbuf[30]; /* Enough for two decimal numbers plus a colon. */
209 char *string;
210 tree result;
212 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
213 file = xloc.file;
214 if (file == NULL && current_function_decl != NULL_TREE)
215 file = DECL_SOURCE_FILE (current_function_decl);
216 if (file == NULL)
217 file = "<unknown file>";
219 if (xloc.line > 0)
221 if (xloc.column > 0)
222 sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
223 else
224 sprintf (linecolbuf, "%d", xloc.line);
225 colon = ":";
226 line = linecolbuf;
228 else
229 colon = line = "";
231 /* Add (FUNCTION). */
232 name = lang_hooks.decl_printable_name (current_function_decl, 1);
233 if (name)
235 op = " (";
236 cp = ")";
238 else
239 op = name = cp = "";
241 string = concat (file, colon, line, op, name, cp, NULL);
242 result = mf_build_string (string);
243 free (string);
245 return result;
249 /* global tree nodes */
251 /* Global tree objects for global variables and functions exported by
252 mudflap runtime library. mf_init_extern_trees must be called
253 before using these. */
255 /* uintptr_t (usually "unsigned long") */
256 static GTY (()) tree mf_uintptr_type;
258 /* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
259 static GTY (()) tree mf_cache_struct_type;
261 /* struct __mf_cache * const */
262 static GTY (()) tree mf_cache_structptr_type;
264 /* extern struct __mf_cache __mf_lookup_cache []; */
265 static GTY (()) tree mf_cache_array_decl;
267 /* extern unsigned char __mf_lc_shift; */
268 static GTY (()) tree mf_cache_shift_decl;
270 /* extern uintptr_t __mf_lc_mask; */
271 static GTY (()) tree mf_cache_mask_decl;
273 /* Their function-scope local shadows, used in single-threaded mode only. */
275 /* auto const unsigned char __mf_lc_shift_l; */
276 static GTY (()) tree mf_cache_shift_decl_l;
278 /* auto const uintptr_t __mf_lc_mask_l; */
279 static GTY (()) tree mf_cache_mask_decl_l;
281 /* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
282 static GTY (()) tree mf_check_fndecl;
284 /* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
285 static GTY (()) tree mf_register_fndecl;
287 /* extern void __mf_unregister (void *ptr, size_t sz, int type); */
288 static GTY (()) tree mf_unregister_fndecl;
290 /* extern void __mf_init (); */
291 static GTY (()) tree mf_init_fndecl;
293 /* extern int __mf_set_options (const char*); */
294 static GTY (()) tree mf_set_options_fndecl;
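/* Taken together, these mirror the libmudflap runtime interface used by
   the instrumentation.  As a rough sketch (not literal generated code),
   a checked access of SIZE bytes starting at ADDR boils down to:

     struct __mf_cache *e =
       &__mf_lookup_cache[((uintptr_t) addr >> __mf_lc_shift) & __mf_lc_mask];
     if (e->low > (uintptr_t) addr
         || e->high < (uintptr_t) addr + size - 1)
       __mf_check (addr, size, dirflag, "file:line (function)");

   The exact GIMPLE for this test is built in mf_build_check_statement_for
   below. */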
297 /* Helper for mudflap_init: construct a decl with the given category,
298 name, and type, mark it an external reference, and pushdecl it. */
299 static inline tree
300 mf_make_builtin (enum tree_code category, const char *name, tree type)
302 tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
303 category, get_identifier (name), type));
304 TREE_PUBLIC (decl) = 1;
305 DECL_EXTERNAL (decl) = 1;
306 lang_hooks.decls.pushdecl (decl);
307 /* The decl was declared by the compiler. */
308 DECL_ARTIFICIAL (decl) = 1;
309 /* And we don't want debug info for it. */
310 DECL_IGNORED_P (decl) = 1;
311 return decl;
314 /* Helper for mudflap_init: construct a tree corresponding to the type
315 struct __mf_cache { uintptr_t low; uintptr_t high; };
316 where uintptr_t is the FIELD_TYPE argument. */
317 static inline tree
318 mf_make_mf_cache_struct_type (tree field_type)
320 /* There is, abominably, no language-independent way to construct a
321 RECORD_TYPE. So we have to call the basic type construction
322 primitives by hand. */
323 tree fieldlo = build_decl (UNKNOWN_LOCATION,
324 FIELD_DECL, get_identifier ("low"), field_type);
325 tree fieldhi = build_decl (UNKNOWN_LOCATION,
326 FIELD_DECL, get_identifier ("high"), field_type);
328 tree struct_type = make_node (RECORD_TYPE);
329 DECL_CONTEXT (fieldlo) = struct_type;
330 DECL_CONTEXT (fieldhi) = struct_type;
331 DECL_CHAIN (fieldlo) = fieldhi;
332 TYPE_FIELDS (struct_type) = fieldlo;
333 TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
334 layout_type (struct_type);
336 return struct_type;
339 /* Initialize the global tree nodes that correspond to mf-runtime.h
340 declarations. */
341 void
342 mudflap_init (void)
344 static bool done = false;
345 tree mf_const_string_type;
346 tree mf_cache_array_type;
347 tree mf_check_register_fntype;
348 tree mf_unregister_fntype;
349 tree mf_init_fntype;
350 tree mf_set_options_fntype;
352 if (done)
353 return;
354 done = true;
356 mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
357 /*unsignedp=*/true);
358 mf_const_string_type
359 = build_pointer_type (build_qualified_type
360 (char_type_node, TYPE_QUAL_CONST));
362 mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
363 mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
364 mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
365 mf_check_register_fntype =
366 build_function_type_list (void_type_node, ptr_type_node, size_type_node,
367 integer_type_node, mf_const_string_type, NULL_TREE);
368 mf_unregister_fntype =
369 build_function_type_list (void_type_node, ptr_type_node, size_type_node,
370 integer_type_node, NULL_TREE);
371 mf_init_fntype =
372 build_function_type_list (void_type_node, NULL_TREE);
373 mf_set_options_fntype =
374 build_function_type_list (integer_type_node, mf_const_string_type, NULL_TREE);
376 mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
377 mf_cache_array_type);
378 mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
379 unsigned_char_type_node);
380 mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
381 mf_uintptr_type);
382 /* Don't process these in mudflap_enqueue_decl, should they come by
383 there for some reason. */
384 mf_mark (mf_cache_array_decl);
385 mf_mark (mf_cache_shift_decl);
386 mf_mark (mf_cache_mask_decl);
387 mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
388 mf_check_register_fntype);
389 mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
390 mf_check_register_fntype);
391 mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
392 mf_unregister_fntype);
393 mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
394 mf_init_fntype);
395 mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
396 mf_set_options_fntype);
400 /* ------------------------------------------------------------------------ */
401 /* This is the second part of the mudflap instrumentation. It works on
402 low-level GIMPLE using the CFG, because we want to run this pass after
403 tree optimizations have been performed, but we have to preserve the CFG
404 for expansion from trees to RTL.
405 Below is the list of transformations performed on statements in the
406 current function.
408 1) Memory reference transforms: Perform the mudflap indirection-related
409 tree transforms on memory references.
411 2) Mark BUILTIN_ALLOCA calls not inlineable.  */
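/* For orientation, transform 1) means that a store such as

     *p = 42;

   is roughly guarded as

     __mf_check ((void *) p, sizeof (*p), 1, "file:line (fn)");   (1 == write, 0 == read)
     *p = 42;

   except that the check is open-coded as a lookup-cache hit test, with the
   out-of-line __mf_check call only on the (unlikely) miss path.  This is a
   sketch, not literal output; see mf_xform_derefs_1 and
   mf_build_check_statement_for below. */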
415 static unsigned int
416 execute_mudflap_function_ops (void)
418 struct gimplify_ctx gctx;
420 /* Don't instrument functions such as the synthetic constructor
421 built during mudflap_finish_file. */
422 if (mf_marked_p (current_function_decl)
423 || mf_artificial (current_function_decl))
424 return 0;
426 push_gimplify_context (&gctx);
428 add_referenced_var (mf_cache_array_decl);
429 add_referenced_var (mf_cache_shift_decl);
430 add_referenced_var (mf_cache_mask_decl);
432 /* In multithreaded mode, don't cache the lookup cache parameters. */
433 if (! flag_mudflap_threads)
434 mf_decl_cache_locals ();
436 mf_xform_statements ();
438 if (! flag_mudflap_threads)
439 mf_decl_clear_locals ();
441 pop_gimplify_context (NULL);
442 return 0;
445 /* Insert a gimple_seq SEQ on all the outgoing edges of BB.  Note that
446 if BB has more than one such edge, SEQ will be replicated for each edge.
447 Also, abnormal edges will be ignored. */
449 static void
450 insert_edge_copies_seq (gimple_seq seq, basic_block bb)
452 edge e;
453 edge_iterator ei;
454 unsigned n_copies = -1;
456 FOR_EACH_EDGE (e, ei, bb->succs)
457 if (!(e->flags & EDGE_ABNORMAL))
458 n_copies++;
460 FOR_EACH_EDGE (e, ei, bb->succs)
461 if (!(e->flags & EDGE_ABNORMAL))
462 gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
465 /* Create and initialize local shadow variables for the lookup cache
466 globals. Put their decls in the *_l globals for use by
467 mf_build_check_statement_for. */
469 static void
470 mf_decl_cache_locals (void)
472 gimple g;
473 gimple_seq seq = gimple_seq_alloc ();
475 /* Build the cache vars. */
476 mf_cache_shift_decl_l
477 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl),
478 "__mf_lookup_shift_l"));
480 mf_cache_mask_decl_l
481 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl),
482 "__mf_lookup_mask_l"));
484 /* Build initialization nodes for the cache vars. We just load the
485 globals into the cache variables. */
486 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
487 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
488 gimple_seq_add_stmt (&seq, g);
490 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
491 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
492 gimple_seq_add_stmt (&seq, g);
494 insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
496 gsi_commit_edge_inserts ();
500 static void
501 mf_decl_clear_locals (void)
503 /* Unset local shadows. */
504 mf_cache_shift_decl_l = NULL_TREE;
505 mf_cache_mask_decl_l = NULL_TREE;
508 static void
509 mf_build_check_statement_for (tree base, tree limit,
510 gimple_stmt_iterator *instr_gsi,
511 location_t location, tree dirflag)
513 gimple_stmt_iterator gsi;
514 basic_block cond_bb, then_bb, join_bb;
515 edge e;
516 tree cond, t, u, v;
517 tree mf_base;
518 tree mf_elem;
519 tree mf_limit;
520 gimple g;
521 gimple_seq seq, stmts;
523 /* We first need to split the current basic block, and start altering
524 the CFG. This allows us to insert the statements we're about to
525 construct into the right basic blocks. */
527 cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
528 gsi = *instr_gsi;
529 gsi_prev (&gsi);
530 if (! gsi_end_p (gsi))
531 e = split_block (cond_bb, gsi_stmt (gsi));
532 else
533 e = split_block_after_labels (cond_bb);
534 cond_bb = e->src;
535 join_bb = e->dest;
537 /* A recap at this point: join_bb is the basic block at whose head
538 is the gimple statement for which this check expression is being
539 built. cond_bb is the (possibly new, synthetic) basic block the
540 end of which will contain the cache-lookup code, and a
541 conditional that jumps to the cache-miss code or, much more
542 likely, over to join_bb. */
544 /* Create the bb that contains the cache-miss fallback block (mf_check). */
545 then_bb = create_empty_bb (cond_bb);
546 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
547 make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);
549 /* Mark the pseudo-fallthrough edge from cond_bb to join_bb. */
550 e = find_edge (cond_bb, join_bb);
551 e->flags = EDGE_FALSE_VALUE;
552 e->count = cond_bb->count;
553 e->probability = REG_BR_PROB_BASE;
555 /* Update dominance info.  Note that join_bb's data was
556 updated by split_block. */
557 if (dom_info_available_p (CDI_DOMINATORS))
559 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
560 set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
563 /* Build our local variables. */
564 mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem");
565 mf_base = make_rename_temp (mf_uintptr_type, "__mf_base");
566 mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit");
568 /* Build: __mf_base = (uintptr_t) <base address expression>. */
569 seq = gimple_seq_alloc ();
570 t = fold_convert_loc (location, mf_uintptr_type,
571 unshare_expr (base));
572 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
573 gimple_seq_add_seq (&seq, stmts);
574 g = gimple_build_assign (mf_base, t);
575 gimple_set_location (g, location);
576 gimple_seq_add_stmt (&seq, g);
578 /* Build: __mf_limit = (uintptr_t) <limit address expression>. */
579 t = fold_convert_loc (location, mf_uintptr_type,
580 unshare_expr (limit));
581 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
582 gimple_seq_add_seq (&seq, stmts);
583 g = gimple_build_assign (mf_limit, t);
584 gimple_set_location (g, location);
585 gimple_seq_add_stmt (&seq, g);
587 /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
588 & __mf_mask]. */
589 t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
590 flag_mudflap_threads ? mf_cache_shift_decl
591 : mf_cache_shift_decl_l);
592 t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
593 flag_mudflap_threads ? mf_cache_mask_decl
594 : mf_cache_mask_decl_l);
595 t = build4 (ARRAY_REF,
596 TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
597 mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
598 t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
599 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
600 gimple_seq_add_seq (&seq, stmts);
601 g = gimple_build_assign (mf_elem, t);
602 gimple_set_location (g, location);
603 gimple_seq_add_stmt (&seq, g);
605 /* Quick validity check.
607 if (__mf_elem->low > __mf_base
608 || (__mf_elem->high < __mf_limit))
610 __mf_check ();
611 ... and only if single-threaded:
612 __mf_lookup_shift_l = __mf_lc_shift;
613 __mf_lookup_mask_l = __mf_lc_mask;
616 It is expected that this body of code is rarely executed so we mark
617 the edge to the THEN clause of the conditional jump as unlikely. */
619 /* Construct t <-- '__mf_elem->low > __mf_base'. */
620 t = build3 (COMPONENT_REF, mf_uintptr_type,
621 build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
622 TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
623 t = build2 (GT_EXPR, boolean_type_node, t, mf_base);
625 /* Construct '__mf_elem->high < __mf_limit'.
627 First build:
628 1) u <-- '__mf_elem->high'
629 2) v <-- '__mf_limit'.
631 Then build 'u <-- (u < v)'. */
633 u = build3 (COMPONENT_REF, mf_uintptr_type,
634 build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
635 DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);
637 v = mf_limit;
639 u = build2 (LT_EXPR, boolean_type_node, u, v);
641 /* Build the composed conditional: t <-- 't || u'. Then store the
642 result of the evaluation of 't' in a temporary variable which we
643 can use as the condition for the conditional jump. */
644 t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
645 t = force_gimple_operand (t, &stmts, false, NULL_TREE);
646 gimple_seq_add_seq (&seq, stmts);
647 cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond");
648 g = gimple_build_assign (cond, t);
649 gimple_set_location (g, location);
650 gimple_seq_add_stmt (&seq, g);
652 /* Build the conditional jump. 'cond' is just a temporary so we can
653 simply build a void COND_EXPR. We do need labels in both arms though. */
654 g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
655 NULL_TREE);
656 gimple_set_location (g, location);
657 gimple_seq_add_stmt (&seq, g);
659 /* At this point, after so much hard work, we have only constructed
660 the conditional jump,
662 if (__mf_elem->low > __mf_base
663 || (__mf_elem_high < __mf_limit))
665 The lowered GIMPLE tree representing this code is in the statement
666 list starting at 'seq'.
668 We can insert this now in the current basic block, i.e. the one that
669 the statement we're instrumenting was originally in. */
670 gsi = gsi_last_bb (cond_bb);
671 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
673 /* Now build up the body of the cache-miss handling:
675 __mf_check();
676 refresh *_l vars.
678 This is the body of the conditional. */
680 seq = gimple_seq_alloc ();
681 /* u is a string, so it is already a gimple value. */
682 u = mf_file_function_line_tree (location);
683 /* NB: we pass the overall [base..limit] range to mf_check. */
684 v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
685 fold_build2_loc (location,
686 MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
687 build_int_cst (mf_uintptr_type, 1));
688 v = force_gimple_operand (v, &stmts, true, NULL_TREE);
689 gimple_seq_add_seq (&seq, stmts);
690 g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
691 gimple_seq_add_stmt (&seq, g);
693 if (! flag_mudflap_threads)
695 if (stmt_ends_bb_p (g))
697 gsi = gsi_start_bb (then_bb);
698 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
699 e = split_block (then_bb, g);
700 then_bb = e->dest;
701 seq = gimple_seq_alloc ();
704 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
705 gimple_seq_add_stmt (&seq, g);
707 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
708 gimple_seq_add_stmt (&seq, g);
711 /* Insert the check code in the THEN block. */
712 gsi = gsi_start_bb (then_bb);
713 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
715 *instr_gsi = gsi_start_bb (join_bb);
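/* To recap the control flow constructed above (a sketch for orientation,
   not literal output):

     cond_bb:   __mf_base  = (uintptr_t) base;
                __mf_limit = (uintptr_t) limit;
                __mf_elem  = &__mf_lookup_cache[(__mf_base >> shift) & mask];
                if (__mf_elem->low > __mf_base
                    || __mf_elem->high < __mf_limit)    <-- marked unlikely
                  goto then_bb;
     then_bb:   __mf_check (__mf_base, __mf_limit - __mf_base + 1,
                            dirflag, "file:line (fn)");
                (single-threaded only) reload __mf_lookup_shift_l/_mask_l;
     join_bb:   <the original memory-referencing statement>

   where shift/mask are either the __mf_lc_* globals or their *_l local
   shadows, depending on flag_mudflap_threads. */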
719 /* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
720 eligible for instrumentation. For the mudflap1 pass, this implies
721 that it should be registered with the libmudflap runtime. For the
722 mudflap2 pass this means instrumenting an indirection operation with
723 respect to the object.  */
725 static int
726 mf_decl_eligible_p (tree decl)
728 return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
729 /* The decl must have its address taken. In the case of
730 arrays, this flag is also set if the indexes are not
731 compile-time known valid constants. */
732 /* XXX: not sufficient: return-by-value structs! */
733 && TREE_ADDRESSABLE (decl)
734 /* The type of the variable must be complete. */
735 && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
736 /* The decl hasn't been decomposed somehow. */
737 && !DECL_HAS_VALUE_EXPR_P (decl));
741 static void
742 mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
743 location_t location, tree dirflag)
745 tree type, base, limit, addr, size, t;
747 /* Don't instrument read operations. */
748 if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
749 return;
751 /* Don't instrument marked nodes. */
752 if (mf_marked_p (*tp))
753 return;
755 t = *tp;
756 type = TREE_TYPE (t);
758 if (type == error_mark_node)
759 return;
761 size = TYPE_SIZE_UNIT (type);
763 switch (TREE_CODE (t))
765 case ARRAY_REF:
766 case COMPONENT_REF:
768 /* This is trickier than it may first appear. The reason is
769 that we are looking at expressions from the "inside out" at
770 this point. We may have a complex nested aggregate/array
771 expression (e.g. "a.b[i].c"), maybe with an indirection as
772 the leftmost operator ("p->a.b.d"), where instrumentation
773 is necessary. Or we may have an innocent "a.b.c"
774 expression that must not be instrumented. We need to
775 recurse all the way down the nesting structure to figure it
776 out: looking just at the outer node is not enough. */
777 tree var;
778 int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
779 /* If we have a bitfield component reference, we must note the
780 innermost addressable object in ELT, from which we will
781 construct the byte-addressable bounds of the bitfield. */
782 tree elt = NULL_TREE;
783 int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
784 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));
786 /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
787 containment hierarchy to find the outermost VAR_DECL. */
788 var = TREE_OPERAND (t, 0);
789 while (1)
791 if (bitfield_ref_p && elt == NULL_TREE
792 && (TREE_CODE (var) == ARRAY_REF
793 || TREE_CODE (var) == COMPONENT_REF))
794 elt = var;
796 if (TREE_CODE (var) == ARRAY_REF)
798 component_ref_only = 0;
799 var = TREE_OPERAND (var, 0);
801 else if (TREE_CODE (var) == COMPONENT_REF)
802 var = TREE_OPERAND (var, 0);
803 else if (INDIRECT_REF_P (var)
804 || TREE_CODE (var) == MEM_REF)
806 base = TREE_OPERAND (var, 0);
807 break;
809 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
811 var = TREE_OPERAND (var, 0);
812 if (CONSTANT_CLASS_P (var)
813 && TREE_CODE (var) != STRING_CST)
814 return;
816 else
818 gcc_assert (TREE_CODE (var) == VAR_DECL
819 || TREE_CODE (var) == PARM_DECL
820 || TREE_CODE (var) == RESULT_DECL
821 || TREE_CODE (var) == STRING_CST);
822 /* Don't instrument this access if the underlying
823 variable is not "eligible". This test matches
824 those arrays that have only known-valid indexes,
825 and thus are not labeled TREE_ADDRESSABLE. */
826 if (! mf_decl_eligible_p (var) || component_ref_only)
827 return;
828 else
830 base = build1 (ADDR_EXPR,
831 build_pointer_type (TREE_TYPE (var)), var);
832 break;
837 /* Handle the case of ordinary non-indirection structure
838 accesses. These have only nested COMPONENT_REF nodes (no
839 INDIRECT_REF), but pass through the above filter loop.
840 Note that it's possible for such a struct variable to match
841 the eligible_p test because someone else might take its
842 address sometime. */
844 /* We need special processing for bitfield components, because
845 their addresses cannot be taken. */
846 if (bitfield_ref_p)
848 tree field = TREE_OPERAND (t, 1);
850 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
851 size = DECL_SIZE_UNIT (field);
853 if (elt)
854 elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
855 elt);
856 addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
857 addr = fold_build_pointer_plus_loc (location,
858 addr, byte_position (field));
860 else
861 addr = build1 (ADDR_EXPR, build_pointer_type (type), t);
863 limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
864 fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
865 fold_convert (mf_uintptr_type, addr),
866 size),
867 integer_one_node);
869 break;
871 case INDIRECT_REF:
872 addr = TREE_OPERAND (t, 0);
873 base = addr;
874 limit = fold_build_pointer_plus_hwi_loc
875 (location, fold_build_pointer_plus_loc (location, base, size), -1);
876 break;
878 case MEM_REF:
879 addr = fold_build_pointer_plus_loc (location, TREE_OPERAND (t, 0),
880 TREE_OPERAND (t, 1));
881 base = addr;
882 limit = fold_build_pointer_plus_hwi_loc (location,
883 fold_build_pointer_plus_loc (location,
884 base, size), -1);
885 break;
887 case TARGET_MEM_REF:
888 addr = tree_mem_ref_addr (ptr_type_node, t);
889 base = addr;
890 limit = fold_build_pointer_plus_hwi_loc (location,
891 fold_build_pointer_plus_loc (location,
892 base, size), -1);
893 break;
895 case ARRAY_RANGE_REF:
896 warning (OPT_Wmudflap,
897 "mudflap checking not yet implemented for ARRAY_RANGE_REF");
898 return;
900 case BIT_FIELD_REF:
901 /* ??? merge with COMPONENT_REF code above? */
903 tree ofs, rem, bpu;
905 /* If we're not dereferencing something, then the access
906 must be ok. */
907 if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
908 return;
910 bpu = bitsize_int (BITS_PER_UNIT);
911 ofs = fold_convert (bitsizetype, TREE_OPERAND (t, 2));
912 rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
913 ofs = size_binop_loc (location, TRUNC_DIV_EXPR, ofs, bpu);
915 size = fold_convert (bitsizetype, TREE_OPERAND (t, 1));
916 size = size_binop_loc (location, PLUS_EXPR, size, rem);
917 size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
918 size = fold_convert (sizetype, size);
920 addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
921 addr = fold_convert (ptr_type_node, addr);
922 addr = fold_build_pointer_plus_loc (location, addr, ofs);
924 base = addr;
925 limit = fold_build_pointer_plus_hwi_loc (location,
926 fold_build_pointer_plus_loc (location,
927 base, size), -1);
929 break;
931 default:
932 return;
935 mf_build_check_statement_for (base, limit, iter, location, dirflag);
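/* In summary, each handled case above yields a closed byte range
   [base, limit] covering the access, for example (sketch only):

     *p        base = p      limit = (uintptr_t) p + sizeof (*p) - 1
     p->f      base = p      limit = (uintptr_t) &p->f + sizeof (p->f) - 1
     a.b[i]    base = &a     limit = (uintptr_t) &a.b[i] + sizeof (a.b[i]) - 1
               (only when 'a' is addressable, i.e. mf_decl_eligible_p)

   which mf_build_check_statement_for then tests against the lookup
   cache. */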
937 /* Transform
938 1) Memory references.  */
940 static void
941 mf_xform_statements (void)
943 basic_block bb, next;
944 gimple_stmt_iterator i;
945 int saved_last_basic_block = last_basic_block;
946 enum gimple_rhs_class grhs_class;
948 bb = ENTRY_BLOCK_PTR ->next_bb;
951 next = bb->next_bb;
952 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
954 gimple s = gsi_stmt (i);
956 /* Only a few GIMPLE statements can reference memory. */
957 switch (gimple_code (s))
959 case GIMPLE_ASSIGN:
960 mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
961 gimple_location (s), integer_one_node);
962 mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
963 gimple_location (s), integer_zero_node);
964 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
965 if (grhs_class == GIMPLE_BINARY_RHS)
966 mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
967 gimple_location (s), integer_zero_node);
968 break;
970 case GIMPLE_RETURN:
971 if (gimple_return_retval (s) != NULL_TREE)
973 mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
974 gimple_location (s),
975 integer_zero_node);
977 break;
979 default:
983 bb = next;
985 while (bb && bb->index <= saved_last_basic_block);
988 /* ------------------------------------------------------------------------ */
989 /* ADDR_EXPR transforms. Perform the declaration-related mudflap tree
990 transforms on the current function.
992 This is the first part of the mudflap instrumentation. It works on
993 high-level GIMPLE because after lowering, all variables are moved out
994 of their BIND_EXPR binding context, and we lose liveness information
995 for the declarations we wish to instrument. */
997 static unsigned int
998 execute_mudflap_function_decls (void)
1000 struct gimplify_ctx gctx;
1002 /* Don't instrument functions such as the synthetic constructor
1003 built during mudflap_finish_file. */
1004 if (mf_marked_p (current_function_decl)
1005 || mf_artificial (current_function_decl))
1006 return 0;
1008 push_gimplify_context (&gctx);
1010 mf_xform_decls (gimple_body (current_function_decl),
1011 DECL_ARGUMENTS (current_function_decl));
1013 pop_gimplify_context (NULL);
1014 return 0;
1017 /* This struct is used to pass state from mf_xform_decls to the
1018 mx_xfn_xform_decls callback during the traversal searching for objects
1019 that have their addresses taken. */
1020 struct mf_xform_decls_data
1022 tree param_decls;
1026 /* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
1027 _DECLs if appropriate. Arrange to call the __mf_register function
1028 now, and the __mf_unregister function later for each. Return the
1029 gimple sequence after synthesis. */
1030 gimple_seq
1031 mx_register_decls (tree decl, gimple_seq seq, location_t location)
1033 gimple_seq finally_stmts = NULL;
1034 gimple_stmt_iterator initially_stmts = gsi_start (seq);
1036 while (decl != NULL_TREE)
1038 if (mf_decl_eligible_p (decl)
1039 /* Not already processed. */
1040 && ! mf_marked_p (decl)
1041 /* Automatic variable. */
1042 && ! DECL_EXTERNAL (decl)
1043 && ! TREE_STATIC (decl))
1045 tree size = NULL_TREE, variable_name;
1046 gimple unregister_fncall, register_fncall;
1047 tree unregister_fncall_param, register_fncall_param;
1049 /* Variable-sized objects should already have had their sizes
1050 gimplified by the time we get here. */
1051 size = fold_convert (size_type_node,
1052 TYPE_SIZE_UNIT (TREE_TYPE (decl)));
1053 gcc_assert (is_gimple_val (size));
1056 unregister_fncall_param =
1057 mf_mark (build1 (ADDR_EXPR,
1058 build_pointer_type (TREE_TYPE (decl)),
1059 decl));
1060 /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
1061 unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
1062 unregister_fncall_param,
1063 size,
1064 integer_three_node);
1067 variable_name = mf_varname_tree (decl);
1068 register_fncall_param =
1069 mf_mark (build1 (ADDR_EXPR,
1070 build_pointer_type (TREE_TYPE (decl)),
1071 decl));
1072 /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
1073 "name") */
1074 register_fncall = gimple_build_call (mf_register_fndecl, 4,
1075 register_fncall_param,
1076 size,
1077 integer_three_node,
1078 variable_name);
1081 /* Accumulate the two calls. */
1082 gimple_set_location (register_fncall, location);
1083 gimple_set_location (unregister_fncall, location);
1085 /* Add the __mf_register call at the current appending point. */
1086 if (gsi_end_p (initially_stmts))
1088 if (!mf_artificial (decl))
1089 warning (OPT_Wmudflap,
1090 "mudflap cannot track %qE in stub function",
1091 DECL_NAME (decl));
1093 else
1095 gsi_insert_before (&initially_stmts, register_fncall,
1096 GSI_SAME_STMT);
1098 /* Accumulate the FINALLY piece. */
1099 gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
1101 mf_mark (decl);
1104 decl = DECL_CHAIN (decl);
1107 /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
1108 if (finally_stmts != NULL)
1110 gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
1111 gimple_seq new_seq = gimple_seq_alloc ();
1113 gimple_seq_add_stmt (&new_seq, stmt);
1114 return new_seq;
1116 else
1117 return seq;
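/* The net effect on a GIMPLE_BIND, in C-like pseudo-code (a sketch, not
   literal GIMPLE), is:

     {
       struct S s;                      <-- an eligible addressable automatic
       __mf_register (&s, sizeof (s), __MF_TYPE_STACK, "file:line (fn) s");
       try
         {
           ... original bind body ...
         }
       finally
         {
           __mf_unregister (&s, sizeof (s), __MF_TYPE_STACK);
         }
     }

   where __MF_TYPE_STACK is the integer 3 passed above. */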
1121 /* Process every variable mentioned in BIND_EXPRs. */
1122 static tree
1123 mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
1124 bool *handled_operands_p ATTRIBUTE_UNUSED,
1125 struct walk_stmt_info *wi)
1127 struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
1128 gimple stmt = gsi_stmt (*gsi);
1130 switch (gimple_code (stmt))
1132 case GIMPLE_BIND:
1134 /* Process function parameters now (but only once). */
1135 if (d->param_decls)
1137 gimple_bind_set_body (stmt,
1138 mx_register_decls (d->param_decls,
1139 gimple_bind_body (stmt),
1140 gimple_location (stmt)));
1141 d->param_decls = NULL_TREE;
1144 gimple_bind_set_body (stmt,
1145 mx_register_decls (gimple_bind_vars (stmt),
1146 gimple_bind_body (stmt),
1147 gimple_location (stmt)));
1149 break;
1151 default:
1152 break;
1155 return NULL_TREE;
1158 /* Perform the object lifetime tracking mudflap transform on the given function
1159 tree. The tree is mutated in place, with possibly copied subtree nodes.
1161 For every auto variable declared, if its address is ever taken
1162 within the function, then supply its lifetime to the mudflap
1163 runtime with the __mf_register and __mf_unregister calls.  */
1166 static void
1167 mf_xform_decls (gimple_seq fnbody, tree fnparams)
1169 struct mf_xform_decls_data d;
1170 struct walk_stmt_info wi;
1171 struct pointer_set_t *pset = pointer_set_create ();
1173 d.param_decls = fnparams;
1174 memset (&wi, 0, sizeof (wi));
1175 wi.info = (void*) &d;
1176 wi.pset = pset;
1177 walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
1178 pointer_set_destroy (pset);
1182 /* ------------------------------------------------------------------------ */
1183 /* Externally visible mudflap functions. */
1186 /* Mark and return the given tree node to prevent further mudflap
1187 transforms. */
1188 static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;
1190 tree
1191 mf_mark (tree t)
1193 void **slot;
1195 if (marked_trees == NULL)
1196 marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
1197 NULL);
1199 slot = htab_find_slot (marked_trees, t, INSERT);
1200 *slot = t;
1201 return t;
1205 mf_marked_p (tree t)
1207 void *entry;
1209 if (marked_trees == NULL)
1210 return 0;
1212 entry = htab_find (marked_trees, t);
1213 return (entry != NULL);
1216 /* Remember the given node as a static of some kind: global data,
1217 a function-scope static, or an anonymous constant.  Registration with
1218 the runtime is deferred until mudflap_finish_file. */
1220 /* A list of globals whose incomplete declarations we encountered.
1221 Instead of emitting the __mf_register call for them here, it's
1222 delayed until program finish time. If they're still incomplete by
1223 then, warnings are emitted. */
1225 static GTY (()) VEC(tree,gc) *deferred_static_decls;
1227 /* A list of statements for calling __mf_register() at startup time. */
1228 static GTY (()) tree enqueued_call_stmt_chain;
1230 static void
1231 mudflap_register_call (tree obj, tree object_size, tree varname)
1233 tree arg, call_stmt;
1235 arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
1236 arg = fold_convert (ptr_type_node, arg);
1238 call_stmt = build_call_expr (mf_register_fndecl, 4,
1239 arg,
1240 fold_convert (size_type_node, object_size),
1241 /* __MF_TYPE_STATIC */
1242 build_int_cst (integer_type_node, 4),
1243 varname);
1245 append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
1248 void
1249 mudflap_enqueue_decl (tree obj)
1251 if (mf_marked_p (obj))
1252 return;
1254 /* We don't need to process variable decls that are internally
1255 generated extern. If we did, we'd end up with warnings for them
1256 during mudflap_finish_file (). That would confuse the user,
1257 since the text would refer to variables that don't show up in the
1258 user's source code. */
1259 if (DECL_P (obj) && DECL_EXTERNAL (obj) && mf_artificial (obj))
1260 return;
1262 VEC_safe_push (tree, gc, deferred_static_decls, obj);
1266 void
1267 mudflap_enqueue_constant (tree obj)
1269 tree object_size, varname;
1271 if (mf_marked_p (obj))
1272 return;
1274 if (TREE_CODE (obj) == STRING_CST)
1275 object_size = size_int (TREE_STRING_LENGTH (obj));
1276 else
1277 object_size = size_in_bytes (TREE_TYPE (obj));
1279 if (TREE_CODE (obj) == STRING_CST)
1280 varname = mf_build_string ("string literal");
1281 else
1282 varname = mf_build_string ("constant");
1284 mudflap_register_call (obj, object_size, varname);
1288 /* Emit any file-wide instrumentation. */
1289 void
1290 mudflap_finish_file (void)
1292 tree ctor_statements = NULL_TREE;
1294 /* No need to continue when there were errors. */
1295 if (seen_error ())
1296 return;
1298 /* Insert a call to __mf_init. */
1300 tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
1301 append_to_statement_list (call2_stmt, &ctor_statements);
1304 /* If appropriate, call __mf_set_options to pass along read-ignore mode. */
1305 if (flag_mudflap_ignore_reads)
1307 tree arg = mf_build_string ("-ignore-reads");
1308 tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
1309 append_to_statement_list (call_stmt, &ctor_statements);
1312 /* Process all enqueued object decls. */
1313 if (deferred_static_decls)
1315 size_t i;
1316 tree obj;
1317 FOR_EACH_VEC_ELT (tree, deferred_static_decls, i, obj)
1319 gcc_assert (DECL_P (obj));
1321 if (mf_marked_p (obj))
1322 continue;
1324 /* Omit registration for static unaddressed objects. NB:
1325 Perform registration for non-static objects regardless of
1326 TREE_USED or TREE_ADDRESSABLE, because they may be used
1327 from other compilation units. */
1328 if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
1329 continue;
1331 if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
1333 warning (OPT_Wmudflap,
1334 "mudflap cannot track unknown size extern %qE",
1335 DECL_NAME (obj));
1336 continue;
1339 mudflap_register_call (obj,
1340 size_in_bytes (TREE_TYPE (obj)),
1341 mf_varname_tree (obj));
1344 VEC_truncate (tree, deferred_static_decls, 0);
1347 /* Append all the enqueued registration calls. */
1348 if (enqueued_call_stmt_chain)
1350 append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
1351 enqueued_call_stmt_chain = NULL_TREE;
1354 cgraph_build_static_cdtor ('I', ctor_statements,
1355 MAX_RESERVED_INIT_PRIORITY-1);
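/* The synthesized static constructor therefore behaves roughly like
   (names illustrative, not literal output):

     static void __mf_file_ctor (void)
     {
       __mf_init ();
       __mf_set_options ("-ignore-reads");   (only if flag_mudflap_ignore_reads)
       __mf_register (&some_global, sizeof (some_global),
                      4, "file:line some_global");   (4 == __MF_TYPE_STATIC)
       ... one call per deferred decl and enqueued constant ...
     }

   emitted via cgraph_build_static_cdtor with an elevated reserved
   priority so that it runs before ordinary user constructors. */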
1359 static bool
1360 gate_mudflap (void)
1362 return flag_mudflap != 0;
1365 struct gimple_opt_pass pass_mudflap_1 =
1368 GIMPLE_PASS,
1369 "mudflap1", /* name */
1370 gate_mudflap, /* gate */
1371 execute_mudflap_function_decls, /* execute */
1372 NULL, /* sub */
1373 NULL, /* next */
1374 0, /* static_pass_number */
1375 TV_NONE, /* tv_id */
1376 PROP_gimple_any, /* properties_required */
1377 0, /* properties_provided */
1378 0, /* properties_destroyed */
1379 0, /* todo_flags_start */
1380 0 /* todo_flags_finish */
1384 struct gimple_opt_pass pass_mudflap_2 =
1387 GIMPLE_PASS,
1388 "mudflap2", /* name */
1389 gate_mudflap, /* gate */
1390 execute_mudflap_function_ops, /* execute */
1391 NULL, /* sub */
1392 NULL, /* next */
1393 0, /* static_pass_number */
1394 TV_NONE, /* tv_id */
1395 PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
1396 0, /* properties_provided */
1397 0, /* properties_destroyed */
1398 0, /* todo_flags_start */
1399 TODO_verify_flow | TODO_verify_stmts
1400 | TODO_update_ssa /* todo_flags_finish */
1404 #include "gt-tree-mudflap.h"