Fix for PR39557
gcc/tree-mudflap.c
1 /* Mudflap: narrow-pointer bounds-checking by tree rewriting.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Frank Ch. Eigler <fche@redhat.com>
5 and Graydon Hoare <graydon@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "tm_p.h"
32 #include "basic-block.h"
33 #include "flags.h"
34 #include "function.h"
35 #include "tree-inline.h"
36 #include "gimple.h"
37 #include "tree-iterator.h"
38 #include "tree-flow.h"
39 #include "tree-mudflap.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "hashtab.h"
43 #include "diagnostic.h"
44 #include <demangle.h>
45 #include "langhooks.h"
46 #include "ggc.h"
47 #include "cgraph.h"
48 #include "toplev.h"
49 #include "gimple.h"
51 /* Internal function decls */
54 /* Options. */
55 #define flag_mudflap_threads (flag_mudflap == 2)
57 /* Helpers. */
58 static tree mf_build_string (const char *string);
59 static tree mf_varname_tree (tree);
60 static tree mf_file_function_line_tree (location_t);
62 /* Indirection-related instrumentation. */
63 static void mf_decl_cache_locals (void);
64 static void mf_decl_clear_locals (void);
65 static void mf_xform_derefs (void);
66 static unsigned int execute_mudflap_function_ops (void);
68 /* Addressable variables instrumentation. */
69 static void mf_xform_decls (gimple_seq, tree);
70 static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
71 struct walk_stmt_info *);
72 static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
73 static unsigned int execute_mudflap_function_decls (void);
76 /* ------------------------------------------------------------------------ */
77 /* Some generally helpful functions for mudflap instrumentation. */
79 /* Build a reference to a literal string. */
80 static tree
81 mf_build_string (const char *string)
83 size_t len = strlen (string);
84 tree result = mf_mark (build_string (len + 1, string));
86 TREE_TYPE (result) = build_array_type
87 (char_type_node, build_index_type (build_int_cst (NULL_TREE, len)));
88 TREE_CONSTANT (result) = 1;
89 TREE_READONLY (result) = 1;
90 TREE_STATIC (result) = 1;
92 result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);
94 return mf_mark (result);
97 /* Create a properly typed STRING_CST node that describes the given
98 declaration. It will be used as an argument for __mf_register().
99 Try to construct a helpful string, including file/function/variable
100 name. */
102 static tree
103 mf_varname_tree (tree decl)
105 static pretty_printer buf_rec;
106 static int initialized = 0;
107 pretty_printer *buf = & buf_rec;
108 const char *buf_contents;
109 tree result;
111 gcc_assert (decl);
113 if (!initialized)
115 pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
116 initialized = 1;
118 pp_clear_output_area (buf);
120 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
122 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
123 const char *sourcefile;
124 unsigned sourceline = xloc.line;
125 unsigned sourcecolumn = 0;
126 sourcecolumn = xloc.column;
127 sourcefile = xloc.file;
128 if (sourcefile == NULL && current_function_decl != NULL_TREE)
129 sourcefile = DECL_SOURCE_FILE (current_function_decl);
130 if (sourcefile == NULL)
131 sourcefile = "<unknown file>";
133 pp_string (buf, sourcefile);
135 if (sourceline != 0)
137 pp_string (buf, ":");
138 pp_decimal_int (buf, sourceline);
140 if (sourcecolumn != 0)
142 pp_string (buf, ":");
143 pp_decimal_int (buf, sourcecolumn);
148 if (current_function_decl != NULL_TREE)
150 /* Add (FUNCTION) */
151 pp_string (buf, " (");
153 const char *funcname = NULL;
154 if (DECL_NAME (current_function_decl))
155 funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
156 if (funcname == NULL)
157 funcname = "anonymous fn";
159 pp_string (buf, funcname);
161 pp_string (buf, ") ");
163 else
164 pp_string (buf, " ");
166 /* Add <variable-declaration>, possibly demangled. */
168 const char *declname = NULL;
170 if (DECL_NAME (decl) != NULL)
172 if (strcmp ("GNU C++", lang_hooks.name) == 0)
174 /* The gcc/cp decl_printable_name hook doesn't do as good a job as
175 the libiberty demangler. */
176 declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
177 DMGL_AUTO | DMGL_VERBOSE);
179 if (declname == NULL)
180 declname = lang_hooks.decl_printable_name (decl, 3);
182 if (declname == NULL)
183 declname = "<unnamed variable>";
185 pp_string (buf, declname);
188 /* Return the lot as a new STRING_CST. */
189 buf_contents = pp_base_formatted_text (buf);
190 result = mf_build_string (buf_contents);
191 pp_clear_output_area (buf);
193 return result;
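/* For illustration only (file, line and variable here are made up): for an
   address-taken local "buf" declared at hello.c line 5, column 12 inside
   main (), the code above would produce a registration string roughly like

     "hello.c:5:12 (main) buf"

   with the variable name possibly run through the demangler for C++.  */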
197 /* And another friend, for producing a simpler message. */
199 static tree
200 mf_file_function_line_tree (location_t location)
202 expanded_location xloc = expand_location (location);
203 const char *file = NULL, *colon, *line, *op, *name, *cp;
204 char linecolbuf[30]; /* Enough for two decimal numbers plus a colon. */
205 char *string;
206 tree result;
208 /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]]. */
209 file = xloc.file;
210 if (file == NULL && current_function_decl != NULL_TREE)
211 file = DECL_SOURCE_FILE (current_function_decl);
212 if (file == NULL)
213 file = "<unknown file>";
215 if (xloc.line > 0)
217 if (xloc.column > 0)
218 sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
219 else
220 sprintf (linecolbuf, "%d", xloc.line);
221 colon = ":";
222 line = linecolbuf;
224 else
225 colon = line = "";
227 /* Add (FUNCTION). */
228 name = lang_hooks.decl_printable_name (current_function_decl, 1);
229 if (name)
231 op = " (";
232 cp = ")";
234 else
235 op = name = cp = "";
237 string = concat (file, colon, line, op, name, cp, NULL);
238 result = mf_build_string (string);
239 free (string);
241 return result;
245 /* global tree nodes */
247 /* Global tree objects for global variables and functions exported by
248 mudflap runtime library. mf_init_extern_trees must be called
249 before using these. */
251 /* uintptr_t (usually "unsigned long") */
252 static GTY (()) tree mf_uintptr_type;
254 /* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
255 static GTY (()) tree mf_cache_struct_type;
257 /* struct __mf_cache * const */
258 static GTY (()) tree mf_cache_structptr_type;
260 /* extern struct __mf_cache __mf_lookup_cache []; */
261 static GTY (()) tree mf_cache_array_decl;
263 /* extern unsigned char __mf_lc_shift; */
264 static GTY (()) tree mf_cache_shift_decl;
266 /* extern uintptr_t __mf_lc_mask; */
267 static GTY (()) tree mf_cache_mask_decl;
269 /* Their function-scope local shadows, used in single-threaded mode only. */
271 /* auto const unsigned char __mf_lc_shift_l; */
272 static GTY (()) tree mf_cache_shift_decl_l;
274 /* auto const uintptr_t __mf_lc_mask_l; */
275 static GTY (()) tree mf_cache_mask_decl_l;
277 /* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
278 static GTY (()) tree mf_check_fndecl;
280 /* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
281 static GTY (()) tree mf_register_fndecl;
283 /* extern void __mf_unregister (void *ptr, size_t sz, int type); */
284 static GTY (()) tree mf_unregister_fndecl;
286 /* extern void __mf_init (); */
287 static GTY (()) tree mf_init_fndecl;
289 /* extern int __mf_set_options (const char*); */
290 static GTY (()) tree mf_set_options_fndecl;
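/* A rough sketch of how the interface above fits together (illustrative
   only, not the exact code this file emits): every instrumented access
   hashes its base address into __mf_lookup_cache and falls back to
   __mf_check on a cache miss, e.g.

     struct __mf_cache *e =
       &__mf_lookup_cache[(addr >> __mf_lc_shift) & __mf_lc_mask];
     if (e->low > addr || e->high < addr + size - 1)
       __mf_check ((void *) addr, size, dirflag, "location string");

   while __mf_register / __mf_unregister announce object lifetimes so the
   runtime can answer those lookups.  */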
293 /* Helper for mudflap_init: construct a decl with the given category,
294 name, and type, mark it an external reference, and pushdecl it. */
295 static inline tree
296 mf_make_builtin (enum tree_code category, const char *name, tree type)
298 tree decl = mf_mark (build_decl (category, get_identifier (name), type));
299 TREE_PUBLIC (decl) = 1;
300 DECL_EXTERNAL (decl) = 1;
301 lang_hooks.decls.pushdecl (decl);
302 /* The decl was declared by the compiler. */
303 DECL_ARTIFICIAL (decl) = 1;
304 /* And we don't want debug info for it. */
305 DECL_IGNORED_P (decl) = 1;
306 return decl;
309 /* Helper for mudflap_init: construct a tree corresponding to the type
310 struct __mf_cache { uintptr_t low; uintptr_t high; };
311 where uintptr_t is the FIELD_TYPE argument. */
312 static inline tree
313 mf_make_mf_cache_struct_type (tree field_type)
315 /* There is, abominably, no language-independent way to construct a
316 RECORD_TYPE. So we have to call the basic type construction
317 primitives by hand. */
318 tree fieldlo = build_decl (FIELD_DECL, get_identifier ("low"), field_type);
319 tree fieldhi = build_decl (FIELD_DECL, get_identifier ("high"), field_type);
321 tree struct_type = make_node (RECORD_TYPE);
322 DECL_CONTEXT (fieldlo) = struct_type;
323 DECL_CONTEXT (fieldhi) = struct_type;
324 TREE_CHAIN (fieldlo) = fieldhi;
325 TYPE_FIELDS (struct_type) = fieldlo;
326 TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
327 layout_type (struct_type);
329 return struct_type;
332 #define build_function_type_0(rtype) \
333 build_function_type (rtype, void_list_node)
334 #define build_function_type_1(rtype, arg1) \
335 build_function_type (rtype, tree_cons (0, arg1, void_list_node))
336 #define build_function_type_3(rtype, arg1, arg2, arg3) \
337 build_function_type (rtype, \
338 tree_cons (0, arg1, \
339 tree_cons (0, arg2, \
340 tree_cons (0, arg3, \
341 void_list_node))))
342 #define build_function_type_4(rtype, arg1, arg2, arg3, arg4) \
343 build_function_type (rtype, \
344 tree_cons (0, arg1, \
345 tree_cons (0, arg2, \
346 tree_cons (0, arg3, \
347 tree_cons (0, arg4, \
348 void_list_node)))))
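/* Example (illustrative): build_function_type_1 (integer_type_node,
   mf_const_string_type) yields the tree for a function type such as
   "int (const char *)", which is exactly what __mf_set_options needs
   below.  */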
350 /* Initialize the global tree nodes that correspond to mf-runtime.h
351 declarations. */
352 void
353 mudflap_init (void)
355 static bool done = false;
356 tree mf_const_string_type;
357 tree mf_cache_array_type;
358 tree mf_check_register_fntype;
359 tree mf_unregister_fntype;
360 tree mf_init_fntype;
361 tree mf_set_options_fntype;
363 if (done)
364 return;
365 done = true;
367 mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
368 /*unsignedp=*/true);
369 mf_const_string_type
370 = build_pointer_type (build_qualified_type
371 (char_type_node, TYPE_QUAL_CONST));
373 mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
374 mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
375 mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
376 mf_check_register_fntype =
377 build_function_type_4 (void_type_node, ptr_type_node, size_type_node,
378 integer_type_node, mf_const_string_type);
379 mf_unregister_fntype =
380 build_function_type_3 (void_type_node, ptr_type_node, size_type_node,
381 integer_type_node);
382 mf_init_fntype =
383 build_function_type_0 (void_type_node);
384 mf_set_options_fntype =
385 build_function_type_1 (integer_type_node, mf_const_string_type);
387 mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
388 mf_cache_array_type);
389 mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
390 unsigned_char_type_node);
391 mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
392 mf_uintptr_type);
393 /* Don't process these in mudflap_enqueue_decl, should they come by
394 there for some reason. */
395 mf_mark (mf_cache_array_decl);
396 mf_mark (mf_cache_shift_decl);
397 mf_mark (mf_cache_mask_decl);
398 mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
399 mf_check_register_fntype);
400 mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
401 mf_check_register_fntype);
402 mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
403 mf_unregister_fntype);
404 mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
405 mf_init_fntype);
406 mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
407 mf_set_options_fntype);
409 #undef build_function_type_4
410 #undef build_function_type_3
411 #undef build_function_type_1
412 #undef build_function_type_0
415 /* ------------------------------------------------------------------------ */
416 /* Memory reference transforms. Perform the mudflap indirection-related
417 tree transforms on the current function.
419 This is the second part of the mudflap instrumentation. It works on
420 low-level GIMPLE using the CFG, because we want to run this pass after
421 tree optimizations have been performed, but we have to preserve the CFG
422 for expansion from trees to RTL. */
424 static unsigned int
425 execute_mudflap_function_ops (void)
427 struct gimplify_ctx gctx;
429 /* Don't instrument functions such as the synthetic constructor
430 built during mudflap_finish_file. */
431 if (mf_marked_p (current_function_decl) ||
432 DECL_ARTIFICIAL (current_function_decl))
433 return 0;
435 push_gimplify_context (&gctx);
437 /* In multithreaded mode, don't cache the lookup cache parameters. */
438 if (! flag_mudflap_threads)
439 mf_decl_cache_locals ();
441 mf_xform_derefs ();
443 if (! flag_mudflap_threads)
444 mf_decl_clear_locals ();
446 pop_gimplify_context (NULL);
447 return 0;
450 /* Create and initialize local shadow variables for the lookup cache
451 globals. Put their decls in the *_l globals for use by
452 mf_build_check_statement_for. */
454 static void
455 mf_decl_cache_locals (void)
457 gimple g;
458 gimple_seq seq = gimple_seq_alloc ();
460 /* Build the cache vars. */
461 mf_cache_shift_decl_l
462 = mf_mark (create_tmp_var (TREE_TYPE (mf_cache_shift_decl),
463 "__mf_lookup_shift_l"));
465 mf_cache_mask_decl_l
466 = mf_mark (create_tmp_var (TREE_TYPE (mf_cache_mask_decl),
467 "__mf_lookup_mask_l"));
469 /* Build initialization nodes for the cache vars. We just load the
470 globals into the cache variables. */
471 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
472 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
473 gimple_seq_add_stmt (&seq, g);
475 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
476 gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
477 gimple_seq_add_stmt (&seq, g);
479 insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);
481 gsi_commit_edge_inserts ();
485 static void
486 mf_decl_clear_locals (void)
488 /* Unset local shadows. */
489 mf_cache_shift_decl_l = NULL_TREE;
490 mf_cache_mask_decl_l = NULL_TREE;
493 static void
494 mf_build_check_statement_for (tree base, tree limit,
495 gimple_stmt_iterator *instr_gsi,
496 location_t location, tree dirflag)
498 gimple_stmt_iterator gsi;
499 basic_block cond_bb, then_bb, join_bb;
500 edge e;
501 tree cond, t, u, v;
502 tree mf_base;
503 tree mf_elem;
504 tree mf_limit;
505 gimple g;
506 gimple_seq seq;
508 /* We first need to split the current basic block, and start altering
509 the CFG. This allows us to insert the statements we're about to
510 construct into the right basic blocks. */
512 cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
513 gsi = *instr_gsi;
514 gsi_prev (&gsi);
515 if (! gsi_end_p (gsi))
516 e = split_block (cond_bb, gsi_stmt (gsi));
517 else
518 e = split_block_after_labels (cond_bb);
519 cond_bb = e->src;
520 join_bb = e->dest;
522 /* A recap at this point: join_bb is the basic block at whose head
523 is the gimple statement for which this check expression is being
524 built. cond_bb is the (possibly new, synthetic) basic block the
525 end of which will contain the cache-lookup code, and a
526 conditional that jumps to the cache-miss code or, much more
527 likely, over to join_bb. */
529 /* Create the bb that contains the cache-miss fallback block (mf_check). */
530 then_bb = create_empty_bb (cond_bb);
531 make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
532 make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);
534 /* Mark the pseudo-fallthrough edge from cond_bb to join_bb. */
535 e = find_edge (cond_bb, join_bb);
536 e->flags = EDGE_FALSE_VALUE;
537 e->count = cond_bb->count;
538 e->probability = REG_BR_PROB_BASE;
540 /* Update dominance info. Note that bb_join's data was
541 updated by split_block. */
542 if (dom_info_available_p (CDI_DOMINATORS))
544 set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
545 set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
548 /* Build our local variables. */
549 mf_elem = create_tmp_var (mf_cache_structptr_type, "__mf_elem");
550 mf_base = create_tmp_var (mf_uintptr_type, "__mf_base");
551 mf_limit = create_tmp_var (mf_uintptr_type, "__mf_limit");
553 /* Build: __mf_base = (uintptr_t) <base address expression>. */
554 seq = gimple_seq_alloc ();
555 t = fold_convert (mf_uintptr_type, unshare_expr (base));
556 gimplify_expr (&t, &seq, &seq, is_gimple_reg_rhs, fb_rvalue);
557 g = gimple_build_assign (mf_base, t);
558 gimple_set_location (g, location);
559 gimple_seq_add_stmt (&seq, g);
561 /* Build: __mf_limit = (uintptr_t) <limit address expression>. */
562 t = fold_convert (mf_uintptr_type, unshare_expr (limit));
563 gimplify_expr (&t, &seq, &seq, is_gimple_reg_rhs, fb_rvalue);
564 g = gimple_build_assign (mf_limit, t);
565 gimple_set_location (g, location);
566 gimple_seq_add_stmt (&seq, g);
568 /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
569 & __mf_mask]. */
570 t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
571 flag_mudflap_threads ? mf_cache_shift_decl
572 : mf_cache_shift_decl_l);
573 t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
574 flag_mudflap_threads ? mf_cache_mask_decl
575 : mf_cache_mask_decl_l);
576 t = build4 (ARRAY_REF,
577 TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
578 mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
579 t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
580 gimplify_expr (&t, &seq, &seq, is_gimple_reg_rhs, fb_rvalue);
581 g = gimple_build_assign (mf_elem, t);
582 gimple_set_location (g, location);
583 gimple_seq_add_stmt (&seq, g);
585 /* Quick validity check.
587 if (__mf_elem->low > __mf_base
588 || (__mf_elem->high < __mf_limit))
590 __mf_check ();
591 ... and only if single-threaded:
592 __mf_lookup_shift_l = ...;
593 __mf_lookup_mask_l = ...;
596 It is expected that this body of code is rarely executed so we mark
597 the edge to the THEN clause of the conditional jump as unlikely. */
599 /* Construct t <-- '__mf_elem->low > __mf_base'. */
600 t = build3 (COMPONENT_REF, mf_uintptr_type,
601 build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
602 TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
603 t = build2 (GT_EXPR, boolean_type_node, t, mf_base);
605 /* Construct '__mf_elem->high < __mf_limit'.
607 First build:
608 1) u <-- '__mf_elem->high'
609 2) v <-- '__mf_limit'.
611 Then build 'u <-- (u < v)'. */
613 u = build3 (COMPONENT_REF, mf_uintptr_type,
614 build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
615 TREE_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);
617 v = mf_limit;
619 u = build2 (LT_EXPR, boolean_type_node, u, v);
621 /* Build the composed conditional: t <-- 't || u'. Then store the
622 result of the evaluation of 't' in a temporary variable which we
623 can use as the condition for the conditional jump. */
624 t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
625 gimplify_expr (&t, &seq, &seq, is_gimple_reg_rhs, fb_rvalue);
626 cond = create_tmp_var (boolean_type_node, "__mf_unlikely_cond");
627 g = gimple_build_assign (cond, t);
628 gimple_set_location (g, location);
629 gimple_seq_add_stmt (&seq, g);
631 /* Build the conditional jump. 'cond' is just a temporary so we can
632 simply build a void COND_EXPR. We do need labels in both arms though. */
633 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, NULL_TREE,
634 NULL_TREE);
635 gimple_set_location (g, location);
636 gimple_seq_add_stmt (&seq, g);
638 /* At this point, after so much hard work, we have only constructed
639 the conditional jump,
641 if (__mf_elem->low > __mf_base
642 || (__mf_elem->high < __mf_limit))
644 The lowered GIMPLE tree representing this code is in the statement
645 sequence 'seq'.
647 We can insert this now in the current basic block, i.e. the one that
648 the statement we're instrumenting was originally in. */
649 gsi = gsi_last_bb (cond_bb);
650 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
652 /* Now build up the body of the cache-miss handling:
654 __mf_check();
655 refresh *_l vars.
657 This is the body of the conditional. */
659 seq = gimple_seq_alloc ();
660 /* u is a string, so it is already a gimple value. */
661 u = mf_file_function_line_tree (location);
662 /* NB: we pass the overall [base..limit] range to mf_check. */
663 v = fold_build2 (PLUS_EXPR, integer_type_node,
664 fold_build2 (MINUS_EXPR, mf_uintptr_type, mf_limit, mf_base),
665 integer_one_node);
666 gimplify_expr (&v, &seq, &seq, is_gimple_mem_rhs, fb_rvalue);
667 g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
668 gimple_seq_add_stmt (&seq, g);
670 if (! flag_mudflap_threads)
672 if (stmt_ends_bb_p (g))
674 gsi = gsi_start_bb (then_bb);
675 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
676 e = split_block (then_bb, g);
677 then_bb = e->dest;
678 seq = gimple_seq_alloc ();
681 g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
682 gimple_seq_add_stmt (&seq, g);
684 g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
685 gimple_seq_add_stmt (&seq, g);
688 /* Insert the check code in the THEN block. */
689 gsi = gsi_start_bb (then_bb);
690 gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
692 *instr_gsi = gsi_start_bb (join_bb);
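/* Schematically (an illustrative source-level view, not the exact GIMPLE
   emitted above), the check constructed by this function amounts to:

     __mf_base  = (uintptr_t) BASE;
     __mf_limit = (uintptr_t) LIMIT;
     __mf_elem  = &__mf_lookup_cache[(__mf_base >> shift) & mask];
     if (__mf_elem->low > __mf_base || __mf_elem->high < __mf_limit)
       {
         __mf_check ((void *) __mf_base, __mf_limit - __mf_base + 1,
                     dirflag, "file:line (function)");
         /* single-threaded only: reload the local shift/mask shadows */
       }

   with the conditional body living in THEN_BB and the instrumented
   statement left at the head of JOIN_BB.  */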
696 /* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
697 eligible for instrumentation. For the mudflap1 pass, this implies
698 that it should be registered with the libmudflap runtime. For the
699 mudflap2 pass this means instrumenting an indirection operation with
700 respect to the object.
701 */
702 static int
703 mf_decl_eligible_p (tree decl)
705 return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
706 /* The decl must have its address taken. In the case of
707 arrays, this flag is also set if the indexes are not
708 compile-time known valid constants. */
709 /* XXX: not sufficient: return-by-value structs! */
710 && TREE_ADDRESSABLE (decl)
711 /* The type of the variable must be complete. */
712 && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
713 /* The decl hasn't been decomposed somehow. */
714 && !DECL_HAS_VALUE_EXPR_P (decl));
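/* For example (illustrative): a local "char buf[32]" whose address is
   passed to another function is TREE_ADDRESSABLE and therefore eligible;
   a scalar local whose address is never taken, or a decl that has been
   replaced by a DECL_VALUE_EXPR, is not.  */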
718 static void
719 mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
720 location_t location, tree dirflag)
722 tree type, base, limit, addr, size, t;
724 /* Don't instrument read operations. */
725 if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
726 return;
728 /* Don't instrument marked nodes. */
729 if (mf_marked_p (*tp))
730 return;
732 t = *tp;
733 type = TREE_TYPE (t);
735 if (type == error_mark_node)
736 return;
738 size = TYPE_SIZE_UNIT (type);
740 switch (TREE_CODE (t))
742 case ARRAY_REF:
743 case COMPONENT_REF:
745 /* This is trickier than it may first appear. The reason is
746 that we are looking at expressions from the "inside out" at
747 this point. We may have a complex nested aggregate/array
748 expression (e.g. "a.b[i].c"), maybe with an indirection as
749 the leftmost operator ("p->a.b.d"), where instrumentation
750 is necessary. Or we may have an innocent "a.b.c"
751 expression that must not be instrumented. We need to
752 recurse all the way down the nesting structure to figure it
753 out: looking just at the outer node is not enough. */
754 tree var;
755 int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
756 /* If we have a bitfield component reference, we must note the
757 innermost addressable object in ELT, from which we will
758 construct the byte-addressable bounds of the bitfield. */
759 tree elt = NULL_TREE;
760 int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
761 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));
763 /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
764 containment hierarchy to find the outermost VAR_DECL. */
765 var = TREE_OPERAND (t, 0);
766 while (1)
768 if (bitfield_ref_p && elt == NULL_TREE
769 && (TREE_CODE (var) == ARRAY_REF
770 || TREE_CODE (var) == COMPONENT_REF))
771 elt = var;
773 if (TREE_CODE (var) == ARRAY_REF)
775 component_ref_only = 0;
776 var = TREE_OPERAND (var, 0);
778 else if (TREE_CODE (var) == COMPONENT_REF)
779 var = TREE_OPERAND (var, 0);
780 else if (INDIRECT_REF_P (var))
782 base = TREE_OPERAND (var, 0);
783 break;
785 else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
787 var = TREE_OPERAND (var, 0);
788 if (CONSTANT_CLASS_P (var)
789 && TREE_CODE (var) != STRING_CST)
790 return;
792 else
794 gcc_assert (TREE_CODE (var) == VAR_DECL
795 || TREE_CODE (var) == PARM_DECL
796 || TREE_CODE (var) == RESULT_DECL
797 || TREE_CODE (var) == STRING_CST);
798 /* Don't instrument this access if the underlying
799 variable is not "eligible". This test matches
800 those arrays that have only known-valid indexes,
801 and thus are not labeled TREE_ADDRESSABLE. */
802 if (! mf_decl_eligible_p (var) || component_ref_only)
803 return;
804 else
806 base = build1 (ADDR_EXPR,
807 build_pointer_type (TREE_TYPE (var)), var);
808 break;
813 /* Handle the case of ordinary non-indirection structure
814 accesses. These have only nested COMPONENT_REF nodes (no
815 INDIRECT_REF), but pass through the above filter loop.
816 Note that it's possible for such a struct variable to match
817 the eligible_p test because someone else might take its
818 address sometime. */
820 /* We need special processing for bitfield components, because
821 their addresses cannot be taken. */
822 if (bitfield_ref_p)
824 tree field = TREE_OPERAND (t, 1);
826 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
827 size = DECL_SIZE_UNIT (field);
829 if (elt)
830 elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
831 elt);
832 addr = fold_convert (ptr_type_node, elt ? elt : base);
833 addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
834 addr, fold_convert (sizetype,
835 byte_position (field)));
837 else
838 addr = build1 (ADDR_EXPR, build_pointer_type (type), t);
840 limit = fold_build2 (MINUS_EXPR, mf_uintptr_type,
841 fold_build2 (PLUS_EXPR, mf_uintptr_type,
842 convert (mf_uintptr_type, addr),
843 size),
844 integer_one_node);
846 break;
848 case INDIRECT_REF:
849 addr = TREE_OPERAND (t, 0);
850 base = addr;
851 limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
852 fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, base,
853 size),
854 size_int (-1));
855 break;
857 case TARGET_MEM_REF:
858 addr = tree_mem_ref_addr (ptr_type_node, t);
859 base = addr;
860 limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
861 fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, base,
862 size),
863 size_int (-1));
864 break;
866 case ARRAY_RANGE_REF:
867 warning (OPT_Wmudflap,
868 "mudflap checking not yet implemented for ARRAY_RANGE_REF");
869 return;
871 case BIT_FIELD_REF:
872 /* ??? merge with COMPONENT_REF code above? */
874 tree ofs, rem, bpu;
876 /* If we're not dereferencing something, then the access
877 must be ok. */
878 if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
879 return;
881 bpu = bitsize_int (BITS_PER_UNIT);
882 ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
883 rem = size_binop (TRUNC_MOD_EXPR, ofs, bpu);
884 ofs = fold_convert (sizetype, size_binop (TRUNC_DIV_EXPR, ofs, bpu));
886 size = convert (bitsizetype, TREE_OPERAND (t, 1));
887 size = size_binop (PLUS_EXPR, size, rem);
888 size = size_binop (CEIL_DIV_EXPR, size, bpu);
889 size = convert (sizetype, size);
891 addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
892 addr = convert (ptr_type_node, addr);
893 addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, addr, ofs);
895 base = addr;
896 limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
897 fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
898 base, size),
899 size_int (-1));
901 break;
903 default:
904 return;
907 mf_build_check_statement_for (base, limit, iter, location, dirflag);
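/* To make the cases above concrete (illustrative only): for a dereference
   "*p" of type T the checked range is [p, p + sizeof (T) - 1]; for
   "q->field" reached through an INDIRECT_REF the base is "q" and the limit
   is "&q->field + sizeof (q->field) - 1"; a plain "a.b.c" access to an
   ineligible local falls through the filter loop and is left alone.  */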
910 static void
911 mf_xform_derefs (void)
913 basic_block bb, next;
914 gimple_stmt_iterator i;
915 int saved_last_basic_block = last_basic_block;
916 enum gimple_rhs_class grhs_class;
918 bb = ENTRY_BLOCK_PTR->next_bb;
919 do
921 next = bb->next_bb;
922 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
924 gimple s = gsi_stmt (i);
926 /* Only a few GIMPLE statements can reference memory. */
927 switch (gimple_code (s))
929 case GIMPLE_ASSIGN:
930 mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
931 gimple_location (s), integer_one_node);
932 mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
933 gimple_location (s), integer_zero_node);
934 grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
935 if (grhs_class == GIMPLE_BINARY_RHS)
936 mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
937 gimple_location (s), integer_zero_node);
938 break;
940 case GIMPLE_RETURN:
941 if (gimple_return_retval (s) != NULL_TREE)
943 mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
944 gimple_location (s),
945 integer_zero_node);
947 break;
949 default:
953 bb = next;
955 while (bb && bb->index <= saved_last_basic_block);
958 /* ------------------------------------------------------------------------ */
959 /* ADDR_EXPR transforms. Perform the declaration-related mudflap tree
960 transforms on the current function.
962 This is the first part of the mudflap instrumentation. It works on
963 high-level GIMPLE because after lowering, all variables are moved out
964 of their BIND_EXPR binding context, and we lose liveness information
965 for the declarations we wish to instrument. */
967 static unsigned int
968 execute_mudflap_function_decls (void)
970 struct gimplify_ctx gctx;
972 /* Don't instrument functions such as the synthetic constructor
973 built during mudflap_finish_file. */
974 if (mf_marked_p (current_function_decl) ||
975 DECL_ARTIFICIAL (current_function_decl))
976 return 0;
978 push_gimplify_context (&gctx);
980 mf_xform_decls (gimple_body (current_function_decl),
981 DECL_ARGUMENTS (current_function_decl));
983 pop_gimplify_context (NULL);
984 return 0;
987 /* This struct is passed between mf_xform_decls to store state needed
988 during the traversal searching for objects that have their
989 addresses taken. */
990 struct mf_xform_decls_data
992 tree param_decls;
996 /* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
997 _DECLs if appropriate. Arrange to call the __mf_register function
998 now, and the __mf_unregister function later for each. Return the
999 gimple sequence after synthesis. */
1000 gimple_seq
1001 mx_register_decls (tree decl, gimple_seq seq, location_t location)
1003 gimple_seq finally_stmts = NULL;
1004 gimple_stmt_iterator initially_stmts = gsi_start (seq);
1006 while (decl != NULL_TREE)
1008 if (mf_decl_eligible_p (decl)
1009 /* Not already processed. */
1010 && ! mf_marked_p (decl)
1011 /* Automatic variable. */
1012 && ! DECL_EXTERNAL (decl)
1013 && ! TREE_STATIC (decl))
1015 tree size = NULL_TREE, variable_name;
1016 gimple unregister_fncall, register_fncall;
1017 tree unregister_fncall_param, register_fncall_param;
1019 /* Variable-sized objects should have sizes already been
1020 gimplified when we got here. */
1021 size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl)));
1022 gcc_assert (is_gimple_val (size));
1025 unregister_fncall_param =
1026 mf_mark (build1 (ADDR_EXPR,
1027 build_pointer_type (TREE_TYPE (decl)),
1028 decl));
1029 /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
1030 unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
1031 unregister_fncall_param,
1032 size,
1033 build_int_cst (NULL_TREE, 3));
1036 variable_name = mf_varname_tree (decl);
1037 register_fncall_param =
1038 mf_mark (build1 (ADDR_EXPR,
1039 build_pointer_type (TREE_TYPE (decl)),
1040 decl));
1041 /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
1042 "name") */
1043 register_fncall = gimple_build_call (mf_register_fndecl, 4,
1044 register_fncall_param,
1045 size,
1046 build_int_cst (NULL_TREE, 3),
1047 variable_name);
1050 /* Accumulate the two calls. */
1051 gimple_set_location (register_fncall, location);
1052 gimple_set_location (unregister_fncall, location);
1054 /* Add the __mf_register call at the current appending point. */
1055 if (gsi_end_p (initially_stmts))
1057 if (!DECL_ARTIFICIAL (decl))
1058 warning (OPT_Wmudflap,
1059 "mudflap cannot track %qs in stub function",
1060 IDENTIFIER_POINTER (DECL_NAME (decl)));
1062 else
1064 gsi_insert_before (&initially_stmts, register_fncall,
1065 GSI_SAME_STMT);
1067 /* Accumulate the FINALLY piece. */
1068 gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
1070 mf_mark (decl);
1073 decl = TREE_CHAIN (decl);
1076 /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
1077 if (finally_stmts != NULL)
1079 gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
1080 gimple_seq new_seq = gimple_seq_alloc ();
1082 gimple_seq_add_stmt (&new_seq, stmt);
1083 return new_seq;
1085 else
1086 return seq;
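/* Roughly speaking (illustrative, not the exact GIMPLE produced), a block

     {
       struct S s;
       ... body using &s ...
     }

   is rewritten by the code above into

     {
       struct S s;
       __mf_register (&s, sizeof (s), __MF_TYPE_STACK, "file:line (fn) s");
       try { ... body ... }
       finally { __mf_unregister (&s, sizeof (s), __MF_TYPE_STACK); }
     }

   so the runtime knows the object exactly for its lifetime.  */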
1090 /* Process every variable mentioned in BIND_EXPRs. */
1091 static tree
1092 mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
1093 bool *handled_operands_p ATTRIBUTE_UNUSED,
1094 struct walk_stmt_info *wi)
1096 struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
1097 gimple stmt = gsi_stmt (*gsi);
1099 switch (gimple_code (stmt))
1101 case GIMPLE_BIND:
1103 /* Process function parameters now (but only once). */
1104 if (d->param_decls)
1106 gimple_bind_set_body (stmt,
1107 mx_register_decls (d->param_decls,
1108 gimple_bind_body (stmt),
1109 gimple_location (stmt)));
1110 d->param_decls = NULL_TREE;
1113 gimple_bind_set_body (stmt,
1114 mx_register_decls (gimple_bind_vars (stmt),
1115 gimple_bind_body (stmt),
1116 gimple_location (stmt)));
1118 break;
1120 default:
1121 break;
1124 return NULL_TREE;
1127 /* Perform the object lifetime tracking mudflap transform on the given function
1128 tree. The tree is mutated in place, with possibly copied subtree nodes.
1130 For every auto variable declared, if its address is ever taken
1131 within the function, then supply its lifetime to the mudflap
1132 runtime with the __mf_register and __mf_unregister calls.
1133 */
1135 static void
1136 mf_xform_decls (gimple_seq fnbody, tree fnparams)
1138 struct mf_xform_decls_data d;
1139 struct walk_stmt_info wi;
1140 struct pointer_set_t *pset = pointer_set_create ();
1142 d.param_decls = fnparams;
1143 memset (&wi, 0, sizeof (wi));
1144 wi.info = (void*) &d;
1145 wi.pset = pset;
1146 walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
1147 pointer_set_destroy (pset);
1151 /* ------------------------------------------------------------------------ */
1152 /* Externally visible mudflap functions. */
1155 /* Mark and return the given tree node to prevent further mudflap
1156 transforms. */
1157 static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;
1159 tree
1160 mf_mark (tree t)
1162 void **slot;
1164 if (marked_trees == NULL)
1165 marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
1166 NULL);
1168 slot = htab_find_slot (marked_trees, t, INSERT);
1169 *slot = t;
1170 return t;
1173 int
1174 mf_marked_p (tree t)
1176 void *entry;
1178 if (marked_trees == NULL)
1179 return 0;
1181 entry = htab_find (marked_trees, t);
1182 return (entry != NULL);
1185 /* Remember given node as a static of some kind: global data,
1186 function-scope static, or an anonymous constant. Its assembler
1187 label is given. */
1189 /* A list of globals whose incomplete declarations we encountered.
1190 Instead of emitting the __mf_register call for them here, it's
1191 delayed until program finish time. If they're still incomplete by
1192 then, warnings are emitted. */
1194 static GTY (()) VEC(tree,gc) *deferred_static_decls;
1196 /* A list of statements for calling __mf_register() at startup time. */
1197 static GTY (()) tree enqueued_call_stmt_chain;
1199 static void
1200 mudflap_register_call (tree obj, tree object_size, tree varname)
1202 tree arg, call_stmt;
1204 arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
1205 arg = convert (ptr_type_node, arg);
1207 call_stmt = build_call_expr (mf_register_fndecl, 4,
1208 arg,
1209 convert (size_type_node, object_size),
1210 /* __MF_TYPE_STATIC */
1211 build_int_cst (NULL_TREE, 4),
1212 varname);
1214 append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
1217 void
1218 mudflap_enqueue_decl (tree obj)
1220 if (mf_marked_p (obj))
1221 return;
1223 /* We don't need to process variable decls that are internally
1224 generated extern. If we did, we'd end up with warnings for them
1225 during mudflap_finish_file (). That would confuse the user,
1226 since the text would refer to variables that don't show up in the
1227 user's source code. */
1228 if (DECL_P (obj) && DECL_EXTERNAL (obj) && DECL_ARTIFICIAL (obj))
1229 return;
1231 VEC_safe_push (tree, gc, deferred_static_decls, obj);
1235 void
1236 mudflap_enqueue_constant (tree obj)
1238 tree object_size, varname;
1240 if (mf_marked_p (obj))
1241 return;
1243 if (TREE_CODE (obj) == STRING_CST)
1244 object_size = build_int_cst (NULL_TREE, TREE_STRING_LENGTH (obj));
1245 else
1246 object_size = size_in_bytes (TREE_TYPE (obj));
1248 if (TREE_CODE (obj) == STRING_CST)
1249 varname = mf_build_string ("string literal");
1250 else
1251 varname = mf_build_string ("constant");
1253 mudflap_register_call (obj, object_size, varname);
1257 /* Emit any file-wide instrumentation. */
1258 void
1259 mudflap_finish_file (void)
1261 tree ctor_statements = NULL_TREE;
1263 /* No need to continue when there were errors. */
1264 if (errorcount != 0 || sorrycount != 0)
1265 return;
1267 /* Insert a call to __mf_init. */
1269 tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
1270 append_to_statement_list (call2_stmt, &ctor_statements);
1273 /* If appropriate, call __mf_set_options to pass along read-ignore mode. */
1274 if (flag_mudflap_ignore_reads)
1276 tree arg = mf_build_string ("-ignore-reads");
1277 tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
1278 append_to_statement_list (call_stmt, &ctor_statements);
1281 /* Process all enqueued object decls. */
1282 if (deferred_static_decls)
1284 size_t i;
1285 tree obj;
1286 for (i = 0; VEC_iterate (tree, deferred_static_decls, i, obj); i++)
1288 gcc_assert (DECL_P (obj));
1290 if (mf_marked_p (obj))
1291 continue;
1293 /* Omit registration for static unaddressed objects. NB:
1294 Perform registration for non-static objects regardless of
1295 TREE_USED or TREE_ADDRESSABLE, because they may be used
1296 from other compilation units. */
1297 if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
1298 continue;
1300 if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
1302 warning (OPT_Wmudflap,
1303 "mudflap cannot track unknown size extern %qs",
1304 IDENTIFIER_POINTER (DECL_NAME (obj)));
1305 continue;
1308 mudflap_register_call (obj,
1309 size_in_bytes (TREE_TYPE (obj)),
1310 mf_varname_tree (obj));
1313 VEC_truncate (tree, deferred_static_decls, 0);
1316 /* Append all the enqueued registration calls. */
1317 if (enqueued_call_stmt_chain)
1319 append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
1320 enqueued_call_stmt_chain = NULL_TREE;
1323 cgraph_build_static_cdtor ('I', ctor_statements,
1324 MAX_RESERVED_INIT_PRIORITY-1);
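/* The synthesized constructor therefore looks roughly like the following
   (illustrative source-level view; the actual function name is
   compiler-generated and the registered objects depend on the translation
   unit):

     static void ctor (void)
     {
       __mf_init ();
       __mf_set_options ("-ignore-reads");   /* only with -fmudflapir */
       __mf_register (&some_global, sizeof (some_global),
                      __MF_TYPE_STATIC, "file.c:3 some_global");
       ...
     }
*/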
1328 static bool
1329 gate_mudflap (void)
1331 return flag_mudflap != 0;
1334 struct gimple_opt_pass pass_mudflap_1 =
1337 GIMPLE_PASS,
1338 "mudflap1", /* name */
1339 gate_mudflap, /* gate */
1340 execute_mudflap_function_decls, /* execute */
1341 NULL, /* sub */
1342 NULL, /* next */
1343 0, /* static_pass_number */
1344 0, /* tv_id */
1345 PROP_gimple_any, /* properties_required */
1346 0, /* properties_provided */
1347 0, /* properties_destroyed */
1348 0, /* todo_flags_start */
1349 TODO_dump_func /* todo_flags_finish */
1353 struct gimple_opt_pass pass_mudflap_2 =
1356 GIMPLE_PASS,
1357 "mudflap2", /* name */
1358 gate_mudflap, /* gate */
1359 execute_mudflap_function_ops, /* execute */
1360 NULL, /* sub */
1361 NULL, /* next */
1362 0, /* static_pass_number */
1363 0, /* tv_id */
1364 PROP_gimple_leh, /* properties_required */
1365 0, /* properties_provided */
1366 0, /* properties_destroyed */
1367 0, /* todo_flags_start */
1368 TODO_verify_flow | TODO_verify_stmts
1369 | TODO_dump_func /* todo_flags_finish */
1373 #include "gt-tree-mudflap.h"