1 /* Mudflap: narrow-pointer bounds-checking by tree rewriting.
2 Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Frank Ch. Eigler <fche@redhat.com>
5 and Graydon Hoare <graydon@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
26 #include "coretypes.h"
30 #include "basic-block.h"
33 #include "tree-inline.h"
35 #include "tree-iterator.h"
36 #include "tree-flow.h"
37 #include "tree-mudflap.h"
38 #include "tree-dump.h"
39 #include "tree-pass.h"
41 #include "diagnostic.h"
43 #include "langhooks.h"
48 /* Internal function decls */
52 #define flag_mudflap_threads (flag_mudflap == 2)
55 static tree
mf_build_string (const char *string
);
56 static tree
mf_varname_tree (tree
);
57 static tree
mf_file_function_line_tree (location_t
);
59 /* Indirection-related instrumentation. */
60 static void mf_decl_cache_locals (void);
61 static void mf_decl_clear_locals (void);
62 static void mf_xform_statements (void);
63 static unsigned int execute_mudflap_function_ops (void);
65 /* Addressable variables instrumentation. */
66 static void mf_xform_decls (gimple_seq
, tree
);
67 static tree
mx_xfn_xform_decls (gimple_stmt_iterator
*, bool *,
68 struct walk_stmt_info
*);
69 static gimple_seq
mx_register_decls (tree
, gimple_seq
, location_t
);
70 static unsigned int execute_mudflap_function_decls (void);
73 /* ------------------------------------------------------------------------ */
74 /* Some generally helpful functions for mudflap instrumentation. */
76 /* Build a reference to a literal string. */
78 mf_build_string (const char *string
)
80 size_t len
= strlen (string
);
81 tree result
= mf_mark (build_string (len
+ 1, string
));
83 TREE_TYPE (result
) = build_array_type
84 (char_type_node
, build_index_type (size_int (len
)));
85 TREE_CONSTANT (result
) = 1;
86 TREE_READONLY (result
) = 1;
87 TREE_STATIC (result
) = 1;
89 result
= build1 (ADDR_EXPR
, build_pointer_type (char_type_node
), result
);
91 return mf_mark (result
);
/* Create a properly typed STRING_CST node that describes the given
   declaration.  It will be used as an argument for __mf_register().
   Try to construct a helpful string, including file/function/variable
   name.  */
/* Per the forward declaration above, this function returns `static tree'.
   NOTE(review): this copy of the file is missing interior lines — the
   return-type line, braces, the `if (!initialized)' guard around
   pp_construct, a `tree result' declaration, and the final
   `return result;' all appear to have been lost.  Restore from upstream
   tree-mudflap.c before compiling.  The surviving tokens are preserved
   below, reformatted.  */
mf_varname_tree (tree decl)
  /* A single static pretty-printer is reused across calls; `initialized'
     presumably guards its one-time construction — TODO confirm.  */
  static pretty_printer buf_rec;
  static int initialized = 0;
  pretty_printer *buf = & buf_rec;
  const char *buf_contents;
      pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
  /* Start from an empty output area; the buffer is shared state.  */
  pp_clear_output_area (buf);

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
    expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
    const char *sourcefile;
    unsigned sourceline = xloc.line;
    unsigned sourcecolumn = 0;
    sourcecolumn = xloc.column;
    sourcefile = xloc.file;
    /* Fall back to the enclosing function's file, then to a placeholder,
       when the decl carries no usable location.  */
    if (sourcefile == NULL && current_function_decl != NULL_TREE)
      sourcefile = DECL_SOURCE_FILE (current_function_decl);
    if (sourcefile == NULL)
      sourcefile = "<unknown file>";

    pp_string (buf, sourcefile);
        pp_string (buf, ":");
        pp_decimal_int (buf, sourceline);
        /* Column 0 means "unknown"; omit it.  */
        if (sourcecolumn != 0)
            pp_string (buf, ":");
            pp_decimal_int (buf, sourcecolumn);

  /* Add the enclosing function name, " (FUNCNAME) ", when available.  */
  if (current_function_decl != NULL_TREE)
      pp_string (buf, " (");
        const char *funcname = NULL;
        if (DECL_NAME (current_function_decl))
          funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
        if (funcname == NULL)
          funcname = "anonymous fn";
        pp_string (buf, funcname);
      pp_string (buf, ") ");
    /* (else branch: just a separating space) */
    pp_string (buf, " ");

  /* Add <variable-declaration>, possibly demangled.  */
    const char *declname = NULL;
    if (DECL_NAME (decl) != NULL)
        if (strcmp ("GNU C++", lang_hooks.name) == 0)
            /* The gcc/cp decl_printable_name hook doesn't do as good a job as
               the libiberty demangler.  */
            declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
                                       DMGL_AUTO | DMGL_VERBOSE);
    /* Demangling failed or was inapplicable: use the language hook,
       then a placeholder.  */
    if (declname == NULL)
      declname = lang_hooks.decl_printable_name (decl, 3);
    if (declname == NULL)
      declname = "<unnamed variable>";
    pp_string (buf, declname);

  /* Return the lot as a new STRING_CST.  */
  buf_contents = pp_base_formatted_text (buf);
  result = mf_build_string (buf_contents);
  /* Leave the shared buffer empty for the next caller.  */
  pp_clear_output_area (buf);
/* And another friend, for producing a simpler message.  */
/* Per the forward declaration above, this function returns `static tree':
   a mudflap string constant of the form
   "FILE:LINE[:COL] (FUNCTION)" describing LOCATION.
   NOTE(review): this copy is missing interior lines — the return-type
   line, braces, the assignments of `file'/`colon'/`line'/`op'/`cp',
   the conditionals selecting between the two sprintf forms, and the
   final `return result;'.  Restore from upstream before compiling.  */
mf_file_function_line_tree (location_t location)
  expanded_location xloc = expand_location (location);
  const char *file = NULL, *colon, *line, *op, *name, *cp;
  char linecolbuf[30]; /* Enough for two decimal numbers plus a colon.  */

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  /* NOTE(review): the line taking `file' from xloc.file appears to be
     missing; only the fallbacks survive.  */
  if (file == NULL && current_function_decl != NULL_TREE)
    file = DECL_SOURCE_FILE (current_function_decl);
  /* NOTE(review): missing `if (file == NULL)' guard before this
     placeholder assignment.  */
    file = "<unknown file>";

  /* Format "LINE:COLUMN" when a column is known...  */
      sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
  /* ...otherwise just "LINE".  */
      sprintf (linecolbuf, "%d", xloc.line);

  /* Add (FUNCTION).  */
  name = lang_hooks.decl_printable_name (current_function_decl, 1);

  /* Concatenate all the pieces (concat is libiberty; NULL-terminated
     argument list) and wrap the result as a mudflap string constant.  */
  string = concat (file, colon, line, op, name, cp, NULL);
  result = mf_build_string (string);
/* global tree nodes */

/* Global tree objects for global variables and functions exported by
   mudflap runtime library.  mf_init_extern_trees must be called
   before using these.  */

/* uintptr_t (usually "unsigned long") */
static GTY (()) tree mf_uintptr_type;

/* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
static GTY (()) tree mf_cache_struct_type;

/* struct __mf_cache * const */
static GTY (()) tree mf_cache_structptr_type;

/* extern struct __mf_cache __mf_lookup_cache []; */
static GTY (()) tree mf_cache_array_decl;

/* extern unsigned char __mf_lc_shift; */
static GTY (()) tree mf_cache_shift_decl;

/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;

/* Their function-scope local shadows, used in single-threaded mode only.
   (Caching the shift/mask in locals lets the optimizer keep them in
   registers; with threads they may change underfoot, so the globals are
   read directly instead — see mf_build_check_statement_for.)  */

/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;

/* auto const uintptr_t __mf_lc_mask_l; */
static GTY (()) tree mf_cache_mask_decl_l;

/* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_check_fndecl;

/* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_register_fndecl;

/* extern void __mf_unregister (void *ptr, size_t sz, int type); */
static GTY (()) tree mf_unregister_fndecl;

/* extern void __mf_init (); */
static GTY (()) tree mf_init_fndecl;

/* extern int __mf_set_options (const char*); */
static GTY (()) tree mf_set_options_fndecl;
290 /* Helper for mudflap_init: construct a decl with the given category,
291 name, and type, mark it an external reference, and pushdecl it. */
293 mf_make_builtin (enum tree_code category
, const char *name
, tree type
)
295 tree decl
= mf_mark (build_decl (UNKNOWN_LOCATION
,
296 category
, get_identifier (name
), type
));
297 TREE_PUBLIC (decl
) = 1;
298 DECL_EXTERNAL (decl
) = 1;
299 lang_hooks
.decls
.pushdecl (decl
);
300 /* The decl was declared by the compiler. */
301 DECL_ARTIFICIAL (decl
) = 1;
302 /* And we don't want debug info for it. */
303 DECL_IGNORED_P (decl
) = 1;
307 /* Helper for mudflap_init: construct a tree corresponding to the type
308 struct __mf_cache { uintptr_t low; uintptr_t high; };
309 where uintptr_t is the FIELD_TYPE argument. */
311 mf_make_mf_cache_struct_type (tree field_type
)
313 /* There is, abominably, no language-independent way to construct a
314 RECORD_TYPE. So we have to call the basic type construction
315 primitives by hand. */
316 tree fieldlo
= build_decl (UNKNOWN_LOCATION
,
317 FIELD_DECL
, get_identifier ("low"), field_type
);
318 tree fieldhi
= build_decl (UNKNOWN_LOCATION
,
319 FIELD_DECL
, get_identifier ("high"), field_type
);
321 tree struct_type
= make_node (RECORD_TYPE
);
322 DECL_CONTEXT (fieldlo
) = struct_type
;
323 DECL_CONTEXT (fieldhi
) = struct_type
;
324 DECL_CHAIN (fieldlo
) = fieldhi
;
325 TYPE_FIELDS (struct_type
) = fieldlo
;
326 TYPE_NAME (struct_type
) = get_identifier ("__mf_cache");
327 layout_type (struct_type
);
/* Initialize the global tree nodes that correspond to mf-runtime.h
   declarations.  Idempotent: the `done' flag presumably guards against
   repeated initialization — the guard lines are missing from this copy.
   NOTE(review): this copy is also missing the function header and
   braces, the `mf_init_fntype' declaration, and several statement
   tails (marked below).  Restore from upstream tree-mudflap.c.  */
  static bool done = false;
  tree mf_const_string_type;
  tree mf_cache_array_type;
  tree mf_check_register_fntype;
  tree mf_unregister_fntype;
  tree mf_set_options_fntype;

  /* Build the basic types used by the runtime interface.  */
  /* NOTE(review): second argument (unsignedp) of type_for_mode is
     missing here.  */
  mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
  /* NOTE(review): lhs `mf_const_string_type' is missing before this
     `const char *' type construction.  */
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));

  mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
  mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
  /* Incomplete array type: the real size lives in the runtime.  */
  mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);

  /* __mf_check and __mf_register share a signature:
     (void *, size_t, int, const char *) -> void.  */
  mf_check_register_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, mf_const_string_type, NULL_TREE);
  mf_unregister_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, NULL_TREE);
  /* NOTE(review): lhs `mf_init_fntype =' is missing before this call.  */
    build_function_type_list (void_type_node, NULL_TREE);
  mf_set_options_fntype =
    build_function_type_list (integer_type_node, mf_const_string_type, NULL_TREE);

  /* Declare the runtime's exported variables.  */
  mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
                                         mf_cache_array_type);
  mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
                                         unsigned_char_type_node);
  /* NOTE(review): type argument and close paren missing here
     (presumably mf_uintptr_type).  */
  mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
  /* Don't process these in mudflap_enqueue_decl, should they come by
     there for some reason.  */
  mf_mark (mf_cache_array_decl);
  mf_mark (mf_cache_shift_decl);
  mf_mark (mf_cache_mask_decl);

  /* Declare the runtime's exported functions.  */
  mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
                                     mf_check_register_fntype);
  mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
                                        mf_check_register_fntype);
  mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
                                          mf_unregister_fntype);
  /* NOTE(review): type argument missing (presumably mf_init_fntype).  */
  mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
  mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
                                           mf_set_options_fntype);
393 /* ------------------------------------------------------------------------ */
394 /* This is the second part of the mudflap instrumentation. It works on
395 low-level GIMPLE using the CFG, because we want to run this pass after
396 tree optimizations have been performed, but we have to preserve the CFG
397 for expansion from trees to RTL.
398 Below is the list of transformations performed on statements in the
401 1) Memory reference transforms: Perform the mudflap indirection-related
402 tree transforms on memory references.
404 2) Mark BUILTIN_ALLOCA calls not inlineable.
409 execute_mudflap_function_ops (void)
411 struct gimplify_ctx gctx
;
413 /* Don't instrument functions such as the synthetic constructor
414 built during mudflap_finish_file. */
415 if (mf_marked_p (current_function_decl
) ||
416 DECL_ARTIFICIAL (current_function_decl
))
419 push_gimplify_context (&gctx
);
421 /* In multithreaded mode, don't cache the lookup cache parameters. */
422 if (! flag_mudflap_threads
)
423 mf_decl_cache_locals ();
425 mf_xform_statements ();
427 if (! flag_mudflap_threads
)
428 mf_decl_clear_locals ();
430 pop_gimplify_context (NULL
);
434 /* Insert a gimple_seq SEQ on all the outgoing edges out of BB. Note that
435 if BB has more than one edge, STMT will be replicated for each edge.
436 Also, abnormal edges will be ignored. */
439 insert_edge_copies_seq (gimple_seq seq
, basic_block bb
)
443 unsigned n_copies
= -1;
445 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
446 if (!(e
->flags
& EDGE_ABNORMAL
))
449 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
450 if (!(e
->flags
& EDGE_ABNORMAL
))
451 gsi_insert_seq_on_edge (e
, n_copies
-- > 0 ? gimple_seq_copy (seq
) : seq
);
454 /* Create and initialize local shadow variables for the lookup cache
455 globals. Put their decls in the *_l globals for use by
456 mf_build_check_statement_for. */
459 mf_decl_cache_locals (void)
462 gimple_seq seq
= gimple_seq_alloc ();
464 /* Build the cache vars. */
465 mf_cache_shift_decl_l
466 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl
),
467 "__mf_lookup_shift_l"));
470 = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl
),
471 "__mf_lookup_mask_l"));
473 /* Build initialization nodes for the cache vars. We just load the
474 globals into the cache variables. */
475 g
= gimple_build_assign (mf_cache_shift_decl_l
, mf_cache_shift_decl
);
476 gimple_set_location (g
, DECL_SOURCE_LOCATION (current_function_decl
));
477 gimple_seq_add_stmt (&seq
, g
);
479 g
= gimple_build_assign (mf_cache_mask_decl_l
, mf_cache_mask_decl
);
480 gimple_set_location (g
, DECL_SOURCE_LOCATION (current_function_decl
));
481 gimple_seq_add_stmt (&seq
, g
);
483 insert_edge_copies_seq (seq
, ENTRY_BLOCK_PTR
);
485 gsi_commit_edge_inserts ();
490 mf_decl_clear_locals (void)
492 /* Unset local shadows. */
493 mf_cache_shift_decl_l
= NULL_TREE
;
494 mf_cache_mask_decl_l
= NULL_TREE
;
/* Build and insert the mudflap cache-lookup / __mf_check code for one
   memory access spanning [BASE .. LIMIT], in front of the statement at
   *INSTR_GSI.  On return *INSTR_GSI points into the block that now
   begins with the original statement.  Presumably `static void' —
   no forward declaration is visible in this copy.
   NOTE(review): this copy is missing many interior lines: the function
   header and braces, local declarations (apparently tree mf_elem,
   mf_base, mf_limit, t, u, v, cond; gimple g; edge e; and others),
   the iterator setup before the gsi_end_p test, the assignment of
   join_bb from the split edge, and several statement tails (marked
   below).  The surviving tokens are preserved, reformatted.  */
mf_build_check_statement_for (tree base, tree limit,
                              gimple_stmt_iterator *instr_gsi,
                              location_t location, tree dirflag)
  gimple_stmt_iterator gsi;
  basic_block cond_bb, then_bb, join_bb;
  gimple_seq seq, stmts;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */
  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
  /* Split before the instrumented statement (or after the labels when
     it is first); NOTE(review): the initialization of `gsi' before this
     test is missing here.  */
  if (! gsi_end_p (gsi))
    e = split_block (cond_bb, gsi_stmt (gsi));
    e = split_block_after_labels (cond_bb);

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);

  /* Build our local variables.  */
  mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem");
  mf_base = make_rename_temp (mf_uintptr_type, "__mf_base");
  mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  seq = gimple_seq_alloc ();
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (base));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_base, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (limit));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_limit, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
     & __mf_mask].  Single-threaded code reads the cached locals
     (*_l) instead of the globals.  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              flag_mudflap_threads ? mf_cache_shift_decl
                                   : mf_cache_shift_decl_l);
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              flag_mudflap_threads ? mf_cache_mask_decl
                                   : mf_cache_mask_decl_l);
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_elem, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Quick validity check.

       if (__mf_elem->low > __mf_base
           || (__mf_elem_high < __mf_limit))
          ... call __mf_check ...
          ... and only if single-threaded:
          __mf_lookup_shift_1 = f...;
          __mf_lookup_mask_l = ...;

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low > __mf_base'.  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     1) u <-- '__mf_elem->high'
     2) v <-- '__mf_limit'.

     Then build 'u <-- (u < v).  */
  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);
  /* NOTE(review): the assignment of `v' (presumably v = mf_limit;) is
     missing from this copy.  */
  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond");
  g = gimple_build_assign (cond, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  /* NOTE(review): the trailing argument (presumably NULL_TREE) and close
     paren of this call are missing from this copy.  */
  g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

       if (__mf_elem->low > __mf_base
           || (__mf_elem_high < __mf_limit))

     The lowered GIMPLE tree representing this code is in the statement
     list starting at 'head'.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /* Now build up the body of the cache-miss handling:

       __mf_check (__mf_base, size, dirflag, location-string);
       refresh *_l vars (single-threaded only);

     This is the body of the conditional.  */
  seq = gimple_seq_alloc ();
  /* u is a string, so it is already a gimple value.  */
  u = mf_file_function_line_tree (location);
  /* NB: we pass the overall [base..limit] range to mf_check.  */
  v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
                       fold_build2_loc (location,
                                        MINUS_EXPR, mf_uintptr_type,
                                        mf_limit, mf_base),
                       build_int_cst (mf_uintptr_type, 1));
  v = force_gimple_operand (v, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
  gimple_seq_add_stmt (&seq, g);

  /* Single-threaded: reload the shadow shift/mask locals after the
     check, since __mf_check may have changed the globals.  */
  if (! flag_mudflap_threads)
      /* If the call can end a basic block, split so the reloads land in
         their own block.  */
      if (stmt_ends_bb_p (g))
          gsi = gsi_start_bb (then_bb);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
          e = split_block (then_bb, g);
          /* NOTE(review): the update of then_bb from `e' is missing
             from this copy.  */
          seq = gimple_seq_alloc ();

      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
      gimple_seq_add_stmt (&seq, g);

      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
      gimple_seq_add_stmt (&seq, g);

  /* Insert the check code in the THEN block.  */
  gsi = gsi_start_bb (then_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /* Resume iteration at the head of the join block.  */
  *instr_gsi = gsi_start_bb (join_bb);
708 /* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
709 eligible for instrumentation. For the mudflap1 pass, this implies
710 that it should be registered with the libmudflap runtime. For the
711 mudflap2 pass this means instrumenting an indirection operation with
712 respect to the object.
715 mf_decl_eligible_p (tree decl
)
717 return ((TREE_CODE (decl
) == VAR_DECL
|| TREE_CODE (decl
) == PARM_DECL
)
718 /* The decl must have its address taken. In the case of
719 arrays, this flag is also set if the indexes are not
720 compile-time known valid constants. */
721 /* XXX: not sufficient: return-by-value structs! */
722 && TREE_ADDRESSABLE (decl
)
723 /* The type of the variable must be complete. */
724 && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl
))
725 /* The decl hasn't been decomposed somehow. */
726 && !DECL_HAS_VALUE_EXPR_P (decl
));
/* Instrument one memory-referencing expression *TP found in the
   statement at *ITER: compute the accessed [base..limit] byte range and
   emit a mudflap check for it via mf_build_check_statement_for.
   DIRFLAG is integer_zero_node for reads, integer_one_node for writes.
   Presumably `static void' — no forward declaration is visible here.
   NOTE(review): this copy is missing many interior lines: the function
   header and braces, `t = *tp;' before the TREE_TYPE use, the switch's
   case labels (ARRAY_REF/COMPONENT_REF, the INDIRECT_REF/MEM_REF,
   TARGET_MEM_REF and BIT_FIELD_REF arms can be inferred from the
   surviving bodies), the `var' declaration, the containment-walk loop
   header, several early `return's, and assorted statement tails
   (marked below).  Surviving tokens preserved, reformatted.  */
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))

  type = TREE_TYPE (t);

  if (type == error_mark_node)

  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
      /* (ARRAY_REF / COMPONENT_REF arm:) */
      /* This is trickier than it may first appear.  The reason is
         that we are looking at expressions from the "inside out" at
         this point.  We may have a complex nested aggregate/array
         expression (e.g. "a.b[i].c"), maybe with an indirection as
         the leftmost operator ("p->a.b.d"), where instrumentation
         is necessary.  Or we may have an innocent "a.b.c"
         expression that must not be instrumented.  We need to
         recurse all the way down the nesting structure to figure it
         out: looking just at the outer node is not enough.  */
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
        /* If we have a bitfield component reference, we must note the
           innermost addressable object in ELT, from which we will
           construct the byte-addressable bounds of the bitfield.  */
        tree elt = NULL_TREE;
        int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
            /* Remember the innermost byte-addressable object enclosing
               a bitfield.  */
            if (bitfield_ref_p && elt == NULL_TREE
                && (TREE_CODE (var) == ARRAY_REF
                    || TREE_CODE (var) == COMPONENT_REF))

            if (TREE_CODE (var) == ARRAY_REF)
                /* An array access forces a real check.  */
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var)
                     || TREE_CODE (var) == MEM_REF)
                /* Reached an indirection: the base is the pointer.  */
                base = TREE_OPERAND (var, 0);
            else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
              var = TREE_OPERAND (var, 0);
                /* Constants other than string literals need no check.  */
                if (CONSTANT_CLASS_P (var)
                    && TREE_CODE (var) != STRING_CST)

                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  base = build1 (ADDR_EXPR,
                                 build_pointer_type (TREE_TYPE (var)), var);

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

            /* NOTE(review): the operand tail of this build1 is missing
               from this copy.  */
              elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
            addr = fold_convert_loc (location, ptr_type_node,
                                     elt ? elt : base);
            addr = fold_build_pointer_plus_loc (location,
                                                addr, byte_position (field));
            /* (non-bitfield arm:) address of the whole reference.  */
            addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        /* limit = addr + size - 1.  NOTE(review): the trailing
           arguments of this expression are missing from this copy.  */
        limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                 fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
                                  fold_convert (mf_uintptr_type, addr),

      /* (INDIRECT_REF-style arm:) the base is the pointer operand.  */
      addr = TREE_OPERAND (t, 0);
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);

      /* (MEM_REF arm:) pointer plus constant offset.  */
      addr = fold_build_pointer_plus_loc (location, TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1));
      /* NOTE(review): trailing arguments missing.  */
      limit = fold_build_pointer_plus_hwi_loc (location,
               fold_build_pointer_plus_loc (location,

      /* (TARGET_MEM_REF arm:) */
      addr = tree_mem_ref_addr (ptr_type_node, t);
      /* NOTE(review): trailing arguments missing.  */
      limit = fold_build_pointer_plus_hwi_loc (location,
               fold_build_pointer_plus_loc (location,

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
               "mudflap checking not yet implemented for ARRAY_RANGE_REF");

      /* (BIT_FIELD_REF arm:) */
      /* ??? merge with COMPONENT_REF code above? */

      /* If we're not dereferencing something, then the access
         must be ok.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)

      /* Convert the bit offset/width into a byte range.  */
      bpu = bitsize_int (BITS_PER_UNIT);
      ofs = fold_convert (bitsizetype, TREE_OPERAND (t, 2));
      rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
      ofs = size_binop_loc (location, TRUNC_DIV_EXPR, ofs, bpu);

      size = fold_convert (bitsizetype, TREE_OPERAND (t, 1));
      size = size_binop_loc (location, PLUS_EXPR, size, rem);
      size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
      size = fold_convert (sizetype, size);

      addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
      addr = fold_convert (ptr_type_node, addr);
      addr = fold_build_pointer_plus_loc (location, addr, ofs);

      /* NOTE(review): trailing arguments missing.  */
      limit = fold_build_pointer_plus_hwi_loc (location,
               fold_build_pointer_plus_loc (location,

  /* Emit the actual range check for [base..limit].  */
  mf_build_check_statement_for (base, limit, iter, location, dirflag);
/* Walk every statement of the current function and transform:
   1) Memory references.
   2) BUILTIN_ALLOCA calls.
   NOTE(review): this copy is missing the function header and braces,
   the do/while loop opener (and the advance of `bb' via `next'), and
   the switch's case labels — from the surviving bodies these are
   evidently GIMPLE_ASSIGN, GIMPLE_RETURN and GIMPLE_CALL.  Surviving
   tokens preserved, reformatted.  */
mf_xform_statements (void)
  basic_block bb, next;
  gimple_stmt_iterator i;
  /* Remember the pre-existing block count so blocks created by the
     instrumentation itself are not re-visited.  */
  int saved_last_basic_block = last_basic_block;
  enum gimple_rhs_class grhs_class;

  bb = ENTRY_BLOCK_PTR->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
          gimple s = gsi_stmt (i);

          /* Only a few GIMPLE statements can reference memory.  */
          switch (gimple_code (s))
              /* (GIMPLE_ASSIGN arm:) check the lhs as a write, the rhs
                 operand(s) as reads.  */
              mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
                                 gimple_location (s), integer_one_node);
              mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
                                 gimple_location (s), integer_zero_node);
              grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
              if (grhs_class == GIMPLE_BINARY_RHS)
                mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
                                   gimple_location (s), integer_zero_node);

              /* (GIMPLE_RETURN arm:) the returned value is a read.  */
              if (gimple_return_retval (s) != NULL_TREE)
                  /* NOTE(review): trailing arguments of this call are
                     missing from this copy.  */
                  mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),

                /* (GIMPLE_CALL arm:) alloca'd memory is registered with
                   the runtime by an out-of-line wrapper, so the call
                   must not be inlined away.  */
                tree fndecl = gimple_call_fndecl (s);
                if (fndecl && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA))
                  gimple_call_set_cannot_inline (s, true);

  while (bb && bb->index <= saved_last_basic_block);
986 /* ------------------------------------------------------------------------ */
987 /* ADDR_EXPR transforms. Perform the declaration-related mudflap tree
988 transforms on the current function.
990 This is the first part of the mudflap instrumentation. It works on
991 high-level GIMPLE because after lowering, all variables are moved out
992 of their BIND_EXPR binding context, and we lose liveness information
993 for the declarations we wish to instrument. */
996 execute_mudflap_function_decls (void)
998 struct gimplify_ctx gctx
;
1000 /* Don't instrument functions such as the synthetic constructor
1001 built during mudflap_finish_file. */
1002 if (mf_marked_p (current_function_decl
) ||
1003 DECL_ARTIFICIAL (current_function_decl
))
1006 push_gimplify_context (&gctx
);
1008 mf_xform_decls (gimple_body (current_function_decl
),
1009 DECL_ARGUMENTS (current_function_decl
));
1011 pop_gimplify_context (NULL
);
/* This struct is passed between mf_xform_decls to store state needed
   during the traversal searching for objects that have their
   addresses taken.  */
struct mf_xform_decls_data
/* NOTE(review): the member list of this struct is missing from this
   copy.  The only member referenced elsewhere in this file is
   `tree param_decls;' (see mx_xfn_xform_decls and mf_xform_decls) —
   restore the body from upstream tree-mudflap.c.  */
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.  */
/* Per the forward declaration above, this returns `static gimple_seq'.
   NOTE(review): this copy is missing the function header and braces,
   several call-argument tails, `else' branches, and the final returns
   (presumably `return new_seq;' / `return seq;').  Surviving tokens
   preserved, reformatted.  */
mx_register_decls (tree decl, gimple_seq seq, location_t location)
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  /* Walk the whole DECL_CHAIN of this scope.  */
  while (decl != NULL_TREE)
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
          tree unregister_fncall_param, register_fncall_param;

          /* Variable-sized objects should have sizes already been
             gimplified when we got here.  */
          size = fold_convert (size_type_node,
                               TYPE_SIZE_UNIT (TREE_TYPE (decl)));
          gcc_assert (is_gimple_val (size));

          /* NOTE(review): the operand tail of this build1 (presumably
             `decl));') is missing from this copy.  */
          unregister_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          /* NOTE(review): the `size' argument line is missing between
             the two surviving argument lines below.  */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
                                                 unregister_fncall_param,
                                                 integer_three_node);

          variable_name = mf_varname_tree (decl);
          /* NOTE(review): operand tail missing here too.  */
          register_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
             "name")  */
          /* NOTE(review): the size/type/name argument lines are missing
             after the surviving first argument.  */
          register_fncall = gimple_build_call (mf_register_fndecl, 4,
                                               register_fncall_param,

          /* Accumulate the two calls.  */
          gimple_set_location (register_fncall, location);
          gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.  */
          if (gsi_end_p (initially_stmts))
              /* Empty body: nothing to guard; warn unless the decl is
                 compiler-generated.  */
              if (!DECL_ARTIFICIAL (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
              /* NOTE(review): the GSI_SAME_STMT argument tail is
                 missing from this copy.  */
              gsi_insert_before (&initially_stmts, register_fncall,

              /* Accumulate the FINALLY piece.  */
              gimple_seq_add_stmt (&finally_stmts, unregister_fncall);

      decl = DECL_CHAIN (decl);

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
      /* Wrap the scope body in try/finally so unregistration happens on
         every exit path.  */
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = gimple_seq_alloc ();
      gimple_seq_add_stmt (&new_seq, stmt);
/* Process every variable mentioned in BIND_EXPRs.  */
/* walk_gimple_seq callback; per the forward declaration above it
   returns `static tree' (NULL_TREE to continue walking).
   NOTE(review): this copy is missing the function header and braces,
   the `case GIMPLE_BIND:' label, the guard around the one-time
   parameter registration (the "but only once" comment implies
   `if (d->param_decls)'), the `break'/`default' arms, and the final
   `return NULL_TREE;'.  Surviving tokens preserved, reformatted.  */
mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
                    bool *handled_operands_p ATTRIBUTE_UNUSED,
                    struct walk_stmt_info *wi)
  struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
      /* (GIMPLE_BIND arm:) */
      /* Process function parameters now (but only once).  */
        gimple_bind_set_body (stmt,
                              mx_register_decls (d->param_decls,
                                                 gimple_bind_body (stmt),
                                                 gimple_location (stmt)));
        /* Clear so parameters are registered only for the outermost bind.  */
        d->param_decls = NULL_TREE;

      /* Register this bind's own local variables.  */
      gimple_bind_set_body (stmt,
                            mx_register_decls (gimple_bind_vars (stmt),
                                               gimple_bind_body (stmt),
                                               gimple_location (stmt)));
1156 /* Perform the object lifetime tracking mudflap transform on the given function
1157 tree. The tree is mutated in place, with possibly copied subtree nodes.
1159 For every auto variable declared, if its address is ever taken
1160 within the function, then supply its lifetime to the mudflap
1161 runtime with the __mf_register and __mf_unregister calls.
   FNBODY is the function's GIMPLE body; FNPARAMS its parameter chain.  */
1165 mf_xform_decls (gimple_seq fnbody
, tree fnparams
)
1167 struct mf_xform_decls_data d
;
1168 struct walk_stmt_info wi
;
/* NOTE(review): pset is created and destroyed here but never used in
   the visible lines (wi is zeroed below and only wi.info is set) —
   looks like dead code; confirm against the full file.  */
1169 struct pointer_set_t
*pset
= pointer_set_create ();
/* Hand the parameter chain to the walk callback via wi.info.  */
1171 d
.param_decls
= fnparams
;
1172 memset (&wi
, 0, sizeof (wi
));
1173 wi
.info
= (void*) &d
;
/* mx_xfn_xform_decls does the per-statement work.  */
1175 walk_gimple_seq (fnbody
, mx_xfn_xform_decls
, NULL
, &wi
);
1176 pointer_set_destroy (pset
);
1180 /* ------------------------------------------------------------------------ */
1181 /* Externally visible mudflap functions. */
1184 /* Mark and return the given tree node to prevent further mudflap
/* GC-rooted pointer-hash table of trees already marked; allocated
   lazily on first use (see mf_mark) and queried by mf_marked_p.  */
1186 static GTY ((param_is (union tree_node
))) htab_t marked_trees
= NULL
;
/* (Body fragment of mf_mark.)  Lazily allocate the mark table, then
   record T in it by pointer identity.  */
1193 if (marked_trees
== NULL
)
1194 marked_trees
= htab_create_ggc (31, htab_hash_pointer
, htab_eq_pointer
,
/* INSERT ensures a slot exists for T even on first sighting.  */
1197 slot
= htab_find_slot (marked_trees
, t
, INSERT
);
/* Return nonzero iff T was previously recorded in marked_trees
   (i.e. passed to mf_mark); used to avoid double instrumentation.  */
1203 mf_marked_p (tree t
)
/* No table yet means nothing has been marked.  */
1207 if (marked_trees
== NULL
)
1210 entry
= htab_find (marked_trees
, t
);
1211 return (entry
!= NULL
);
1214 /* Remember given node as a static of some kind: global data,
1215 function-scope static, or an anonymous constant. Its assembler
1218 /* A list of globals whose incomplete declarations we encountered.
1219 Instead of emitting the __mf_register call for them here, it's
1220 delayed until program finish time. If they're still incomplete by
1221 then, warnings are emitted. */
/* GC-rooted vector; pushed by mudflap_enqueue_decl and drained by
   mudflap_finish_file.  */
1223 static GTY (()) VEC(tree
,gc
) *deferred_static_decls
;
1225 /* A list of statements for calling __mf_register() at startup time. */
/* Appended by mudflap_register_call; spliced into the static ctor and
   reset in mudflap_finish_file.  */
1226 static GTY (()) tree enqueued_call_stmt_chain
;
/* Build a deferred call __mf_register (&OBJ, OBJECT_SIZE,
   __MF_TYPE_STATIC, VARNAME) and append it to
   enqueued_call_stmt_chain for emission at program-initialization
   time (see mudflap_finish_file).  NOTE(review): two of the four call
   arguments are not visible in this fragment — presumably ARG and
   VARNAME; confirm against the full file.  */
1229 mudflap_register_call (tree obj
, tree object_size
, tree varname
)
1231 tree arg
, call_stmt
;
/* Take OBJ's address and normalize it to a plain void pointer.  */
1233 arg
= build1 (ADDR_EXPR
, build_pointer_type (TREE_TYPE (obj
)), obj
);
1234 arg
= fold_convert (ptr_type_node
, arg
);
1236 call_stmt
= build_call_expr (mf_register_fndecl
, 4,
1238 fold_convert (size_type_node
, object_size
),
1239 /* __MF_TYPE_STATIC */
1240 build_int_cst (integer_type_node
, 4),
/* Defer: the call runs from the static constructor, not here.  */
1243 append_to_statement_list (call_stmt
, &enqueued_call_stmt_chain
);
/* Queue a variable declaration OBJ for deferred __mf_register
   processing at program-finish time (mudflap_finish_file).  Already
   marked nodes and compiler-generated externs are skipped.  */
1247 mudflap_enqueue_decl (tree obj
)
/* Already instrumented — nothing to do.  */
1249 if (mf_marked_p (obj
))
1252 /* We don't need to process variable decls that are internally
1253 generated extern. If we did, we'd end up with warnings for them
1254 during mudflap_finish_file (). That would confuse the user,
1255 since the text would refer to variables that don't show up in the
1256 user's source code. */
1257 if (DECL_P (obj
) && DECL_EXTERNAL (obj
) && DECL_ARTIFICIAL (obj
))
/* Remember the decl; registration happens in mudflap_finish_file.  */
1260 VEC_safe_push (tree
, gc
, deferred_static_decls
, obj
);
/* Enqueue registration of the constant OBJ (e.g. a string literal)
   with the mudflap runtime, unless it was already marked.  Size comes
   from the string length for STRING_CSTs, else from its type.  */
1265 mudflap_enqueue_constant (tree obj
)
1267 tree object_size
, varname
;
/* Already instrumented — nothing to do.  */
1269 if (mf_marked_p (obj
))
1272 if (TREE_CODE (obj
) == STRING_CST
)
1273 object_size
= size_int (TREE_STRING_LENGTH (obj
));
1275 object_size
= size_in_bytes (TREE_TYPE (obj
));
/* Pick a human-readable name for runtime violation reports.  */
1277 if (TREE_CODE (obj
) == STRING_CST
)
1278 varname
= mf_build_string ("string literal");
1280 varname
= mf_build_string ("constant");
1282 mudflap_register_call (obj
, object_size
, varname
);
1286 /* Emit any file-wide instrumentation. */
/* Builds a statement list that: calls __mf_init; optionally calls
   __mf_set_options ("-ignore-reads"); registers every decl queued by
   mudflap_enqueue_decl; appends the calls queued by
   mudflap_register_call; then emits it all as a static constructor
   ('I') just after the reserved initialization priorities.  */
1288 mudflap_finish_file (void)
1290 tree ctor_statements
= NULL_TREE
;
1292 /* No need to continue when there were errors. */
1296 /* Insert a call to __mf_init. */
1298 tree call2_stmt
= build_call_expr (mf_init_fndecl
, 0);
1299 append_to_statement_list (call2_stmt
, &ctor_statements
);
1302 /* If appropriate, call __mf_set_options to pass along read-ignore mode. */
1303 if (flag_mudflap_ignore_reads
)
1305 tree arg
= mf_build_string ("-ignore-reads");
1306 tree call_stmt
= build_call_expr (mf_set_options_fndecl
, 1, arg
);
1307 append_to_statement_list (call_stmt
, &ctor_statements
);
1310 /* Process all enqueued object decls. */
1311 if (deferred_static_decls
)
1315 FOR_EACH_VEC_ELT (tree
, deferred_static_decls
, i
, obj
)
1317 gcc_assert (DECL_P (obj
));
/* May have been registered through another path since enqueueing.  */
1319 if (mf_marked_p (obj
))
1322 /* Omit registration for static unaddressed objects. NB:
1323 Perform registration for non-static objects regardless of
1324 TREE_USED or TREE_ADDRESSABLE, because they may be used
1325 from other compilation units. */
1326 if (! TREE_PUBLIC (obj
) && ! TREE_ADDRESSABLE (obj
))
/* Still incomplete at end of compilation: warn, don't register.  */
1329 if (! COMPLETE_TYPE_P (TREE_TYPE (obj
)))
1331 warning (OPT_Wmudflap
,
1332 "mudflap cannot track unknown size extern %qE",
1337 mudflap_register_call (obj
,
1338 size_in_bytes (TREE_TYPE (obj
)),
1339 mf_varname_tree (obj
));
/* Everything processed; empty the queue.  */
1342 VEC_truncate (tree
, deferred_static_decls
, 0);
1345 /* Append all the enqueued registration calls. */
1346 if (enqueued_call_stmt_chain
)
1348 append_to_statement_list (enqueued_call_stmt_chain
, &ctor_statements
);
1349 enqueued_call_stmt_chain
= NULL_TREE
;
/* 'I' = initialization ctor; priority just past the reserved range.  */
1352 cgraph_build_static_cdtor ('I', ctor_statements
,
1353 MAX_RESERVED_INIT_PRIORITY
-1);
/* (Body fragment of gate_mudflap — header not visible here.)
   Pass gate: run the mudflap passes only when -fmudflap was given,
   i.e. flag_mudflap is nonzero.  */
return flag_mudflap
!= 0;
/* Early mudflap pass: runs execute_mudflap_function_decls (variable
   lifetime registration), gated by gate_mudflap; requires only
   PROP_gimple_any.  NOTE(review): the descriptor's surrounding braces
   and some fields are not visible in this fragment.  */
1363 struct gimple_opt_pass pass_mudflap_1
=
1367 "mudflap1", /* name */
1368 gate_mudflap
, /* gate */
1369 execute_mudflap_function_decls
, /* execute */
1372 0, /* static_pass_number */
1373 TV_NONE
, /* tv_id */
1374 PROP_gimple_any
, /* properties_required */
1375 0, /* properties_provided */
1376 0, /* properties_destroyed */
1377 0, /* todo_flags_start */
1378 0 /* todo_flags_finish */
/* Late mudflap pass: runs execute_mudflap_function_ops (pointer
   dereference checking), gated by gate_mudflap; requires SSA, CFG and
   lowered EH, and asks for flow/statement verification plus SSA
   update afterwards.  NOTE(review): the descriptor's surrounding
   braces and some fields are not visible in this fragment.  */
1382 struct gimple_opt_pass pass_mudflap_2
=
1386 "mudflap2", /* name */
1387 gate_mudflap
, /* gate */
1388 execute_mudflap_function_ops
, /* execute */
1391 0, /* static_pass_number */
1392 TV_NONE
, /* tv_id */
1393 PROP_ssa
| PROP_cfg
| PROP_gimple_leh
,/* properties_required */
1394 0, /* properties_provided */
1395 0, /* properties_destroyed */
1396 0, /* todo_flags_start */
1397 TODO_verify_flow
| TODO_verify_stmts
1398 | TODO_update_ssa
/* todo_flags_finish */
1402 #include "gt-tree-mudflap.h"