/* Mudflap: narrow-pointer bounds-checking by tree rewriting.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Frank Ch. Eigler <fche@redhat.com>
   and Graydon Hoare <graydon@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "function.h"
#include "tree-inline.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "tree-mudflap.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "hashtab.h"
#include "diagnostic.h"
#include "demangle.h"
#include "langhooks.h"
#include "ggc.h"
#include "cgraph.h"
#include "toplev.h"

/* Internal function decls */

#define flag_mudflap_threads (flag_mudflap == 2)

static tree mf_build_string (const char *string);
static tree mf_varname_tree (tree);
static tree mf_file_function_line_tree (location_t);

/* Indirection-related instrumentation.  */
static void mf_decl_cache_locals (void);
static void mf_decl_clear_locals (void);
static void mf_xform_statements (void);
static unsigned int execute_mudflap_function_ops (void);

/* Addressable variables instrumentation.  */
static void mf_xform_decls (gimple_seq, tree);
static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
                                struct walk_stmt_info *);
static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
static unsigned int execute_mudflap_function_decls (void);


/* ------------------------------------------------------------------------ */
/* Some generally helpful functions for mudflap instrumentation.  */

/* Build a reference to a literal string.  */
static tree
mf_build_string (const char *string)
{
  size_t len = strlen (string);
  tree result = mf_mark (build_string (len + 1, string));

  TREE_TYPE (result) = build_array_type
    (char_type_node, build_index_type (build_int_cst (NULL_TREE, len)));
  TREE_CONSTANT (result) = 1;
  TREE_READONLY (result) = 1;
  TREE_STATIC (result) = 1;

  result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);

  return mf_mark (result);
}

/* Create a properly typed STRING_CST node that describes the given
   declaration.  It will be used as an argument for __mf_register().
   Try to construct a helpful string, including file/function/variable
   name.  */
static tree
mf_varname_tree (tree decl)
{
  static pretty_printer buf_rec;
  static int initialized = 0;
  pretty_printer *buf = & buf_rec;
  const char *buf_contents;
  tree result;

  gcc_assert (decl);

  if (!initialized)
    {
      pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
      initialized = 1;
    }
  pp_clear_output_area (buf);

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  {
    expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
    const char *sourcefile;
    unsigned sourceline = xloc.line;
    unsigned sourcecolumn = 0;
    sourcecolumn = xloc.column;
    sourcefile = xloc.file;
    if (sourcefile == NULL && current_function_decl != NULL_TREE)
      sourcefile = DECL_SOURCE_FILE (current_function_decl);
    if (sourcefile == NULL)
      sourcefile = "<unknown file>";

    pp_string (buf, sourcefile);

    if (sourceline != 0)
      {
        pp_string (buf, ":");
        pp_decimal_int (buf, sourceline);

        if (sourcecolumn != 0)
          {
            pp_string (buf, ":");
            pp_decimal_int (buf, sourcecolumn);
          }
      }
  }

  if (current_function_decl != NULL_TREE)
    {
      /* Add (FUNCTION).  */
      pp_string (buf, " (");
      {
        const char *funcname = NULL;
        if (DECL_NAME (current_function_decl))
          funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
        if (funcname == NULL)
          funcname = "anonymous fn";

        pp_string (buf, funcname);
      }
      pp_string (buf, ") ");
    }
  else
    pp_string (buf, " ");

  /* Add <variable-declaration>, possibly demangled.  */
  {
    const char *declname = NULL;

    if (DECL_NAME (decl) != NULL)
      {
        if (strcmp ("GNU C++", lang_hooks.name) == 0)
          {
            /* The gcc/cp decl_printable_name hook doesn't do as good a job as
               the libiberty demangler.  */
            declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
                                       DMGL_AUTO | DMGL_VERBOSE);
          }
        if (declname == NULL)
          declname = lang_hooks.decl_printable_name (decl, 3);
      }
    if (declname == NULL)
      declname = "<unnamed variable>";

    pp_string (buf, declname);
  }

  /* Return the lot as a new STRING_CST.  */
  buf_contents = pp_base_formatted_text (buf);
  result = mf_build_string (buf_contents);
  pp_clear_output_area (buf);

  return result;
}


/* And another friend, for producing a simpler message.  */
static tree
mf_file_function_line_tree (location_t location)
{
  expanded_location xloc = expand_location (location);
  const char *file = NULL, *colon, *line, *op, *name, *cp;
  char linecolbuf[30]; /* Enough for two decimal numbers plus a colon.  */
  char *string;
  tree result;

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  file = xloc.file;
  if (file == NULL && current_function_decl != NULL_TREE)
    file = DECL_SOURCE_FILE (current_function_decl);
  if (file == NULL)
    file = "<unknown file>";

  if (xloc.line > 0)
    {
      if (xloc.column > 0)
        sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
      else
        sprintf (linecolbuf, "%d", xloc.line);
      colon = ":";
      line = linecolbuf;
    }
  else
    colon = line = "";

  /* Add (FUNCTION).  */
  name = lang_hooks.decl_printable_name (current_function_decl, 1);
  if (name)
    {
      op = " (";
      cp = ")";
    }
  else
    op = name = cp = "";

  string = concat (file, colon, line, op, name, cp, NULL);
  result = mf_build_string (string);
  free (string);

  return result;
}


/* global tree nodes */

/* Global tree objects for global variables and functions exported by
   mudflap runtime library.  mudflap_init must be called before using
   these.  */

/* uintptr_t (usually "unsigned long") */
static GTY (()) tree mf_uintptr_type;

/* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
static GTY (()) tree mf_cache_struct_type;

/* struct __mf_cache * const */
static GTY (()) tree mf_cache_structptr_type;

/* extern struct __mf_cache __mf_lookup_cache []; */
static GTY (()) tree mf_cache_array_decl;

/* extern unsigned char __mf_lc_shift; */
static GTY (()) tree mf_cache_shift_decl;

/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;

/* Their function-scope local shadows, used in single-threaded mode only.  */

/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;

/* auto const uintptr_t __mf_lc_mask_l; */
static GTY (()) tree mf_cache_mask_decl_l;

/* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_check_fndecl;

/* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_register_fndecl;

/* extern void __mf_unregister (void *ptr, size_t sz, int type); */
static GTY (()) tree mf_unregister_fndecl;

/* extern void __mf_init (); */
static GTY (()) tree mf_init_fndecl;

/* extern int __mf_set_options (const char*); */
static GTY (()) tree mf_set_options_fndecl;


/* Helper for mudflap_init: construct a decl with the given category,
   name, and type, mark it an external reference, and pushdecl it.  */
static inline tree
mf_make_builtin (enum tree_code category, const char *name, tree type)
{
  tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
                                   category, get_identifier (name), type));
  TREE_PUBLIC (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  lang_hooks.decls.pushdecl (decl);
  /* The decl was declared by the compiler.  */
  DECL_ARTIFICIAL (decl) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (decl) = 1;
  return decl;
}

/* Helper for mudflap_init: construct a tree corresponding to the type
     struct __mf_cache { uintptr_t low; uintptr_t high; };
   where uintptr_t is the FIELD_TYPE argument.  */
static inline tree
mf_make_mf_cache_struct_type (tree field_type)
{
  /* There is, abominably, no language-independent way to construct a
     RECORD_TYPE.  So we have to call the basic type construction
     primitives by hand.  */
  tree fieldlo = build_decl (UNKNOWN_LOCATION,
                             FIELD_DECL, get_identifier ("low"), field_type);
  tree fieldhi = build_decl (UNKNOWN_LOCATION,
                             FIELD_DECL, get_identifier ("high"), field_type);

  tree struct_type = make_node (RECORD_TYPE);
  DECL_CONTEXT (fieldlo) = struct_type;
  DECL_CONTEXT (fieldhi) = struct_type;
  TREE_CHAIN (fieldlo) = fieldhi;
  TYPE_FIELDS (struct_type) = fieldlo;
  TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
  layout_type (struct_type);

  return struct_type;
}

/* Initialize the global tree nodes that correspond to mf-runtime.h
   declarations.  */
void
mudflap_init (void)
{
  static bool done = false;
  tree mf_const_string_type;
  tree mf_cache_array_type;
  tree mf_check_register_fntype;
  tree mf_unregister_fntype;
  tree mf_init_fntype;
  tree mf_set_options_fntype;

  if (done)
    return;
  done = true;

  mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
                                                    /*unsignedp=*/true);
  mf_const_string_type
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));

  mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
  mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
  mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
  mf_check_register_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, mf_const_string_type,
                              NULL_TREE);
  mf_unregister_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, NULL_TREE);
  mf_init_fntype =
    build_function_type_list (void_type_node, NULL_TREE);
  mf_set_options_fntype =
    build_function_type_list (integer_type_node, mf_const_string_type,
                              NULL_TREE);

  mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
                                         mf_cache_array_type);
  mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
                                         unsigned_char_type_node);
  mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
                                        mf_uintptr_type);
  /* Don't process these in mudflap_enqueue_decl, should they come by
     there for some reason.  */
  mf_mark (mf_cache_array_decl);
  mf_mark (mf_cache_shift_decl);
  mf_mark (mf_cache_mask_decl);
  mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
                                     mf_check_register_fntype);
  mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
                                        mf_check_register_fntype);
  mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
                                          mf_unregister_fntype);
  mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
                                    mf_init_fntype);
  mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
                                           mf_set_options_fntype);
}
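
/* For orientation, a minimal sketch (an assumption based on the extern
   declarations mirrored in the comments above, not a copy of mf-runtime.h)
   of the runtime interface these decls bind to:

     struct __mf_cache { uintptr_t low; uintptr_t high; };
     extern struct __mf_cache __mf_lookup_cache [];
     extern unsigned char __mf_lc_shift;
     extern uintptr_t __mf_lc_mask;
     extern void __mf_check (void *ptr, size_t sz, int type, const char *loc);
     extern void __mf_register (void *ptr, size_t sz, int type,
                                const char *name);
     extern void __mf_unregister (void *ptr, size_t sz, int type);
     extern void __mf_init (void);
     extern int __mf_set_options (const char *opts);

   mudflap_init builds GCC-side decls with these names and shapes so that
   instrumented objects link against libmudflap.  */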

/* ------------------------------------------------------------------------ */
/* This is the second part of the mudflap instrumentation.  It works on
   low-level GIMPLE using the CFG, because we want to run this pass after
   tree optimizations have been performed, but we have to preserve the CFG
   for expansion from trees to RTL.
   Below is the list of transformations performed on statements in the
   current function.

 1) Memory reference transforms: Perform the mudflap indirection-related
    tree transforms on memory references.

 2) Mark BUILTIN_ALLOCA calls not inlineable.  */

static unsigned int
execute_mudflap_function_ops (void)
{
  struct gimplify_ctx gctx;

  /* Don't instrument functions such as the synthetic constructor
     built during mudflap_finish_file.  */
  if (mf_marked_p (current_function_decl) ||
      DECL_ARTIFICIAL (current_function_decl))
    return 0;

  push_gimplify_context (&gctx);

  /* In multithreaded mode, don't cache the lookup cache parameters.  */
  if (! flag_mudflap_threads)
    mf_decl_cache_locals ();

  mf_xform_statements ();

  if (! flag_mudflap_threads)
    mf_decl_clear_locals ();

  pop_gimplify_context (NULL);
  return 0;
}

/* Insert a gimple_seq SEQ on all the outgoing edges out of BB.  Note that
   if BB has more than one edge, SEQ will be replicated for each edge.
   Also, abnormal edges will be ignored.  */

void
insert_edge_copies_seq (gimple_seq seq, basic_block bb)
{
  edge e;
  edge_iterator ei;
  unsigned n_copies = -1;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      n_copies++;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
}

/* Create and initialize local shadow variables for the lookup cache
   globals.  Put their decls in the *_l globals for use by
   mf_build_check_statement_for.  */

static void
mf_decl_cache_locals (void)
{
  gimple g;
  gimple_seq seq = gimple_seq_alloc ();

  /* Build the cache vars.  */
  mf_cache_shift_decl_l
    = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl),
                                 "__mf_lookup_shift_l"));

  mf_cache_mask_decl_l
    = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl),
                                 "__mf_lookup_mask_l"));

  /* Build initialization nodes for the cache vars.  We just load the
     globals into the cache variables.  */
  g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);

  gsi_commit_edge_inserts ();
}


static void
mf_decl_clear_locals (void)
{
  /* Unset local shadows.  */
  mf_cache_shift_decl_l = NULL_TREE;
  mf_cache_mask_decl_l = NULL_TREE;
}
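
/* A quick illustration (sketch only, not literal GIMPLE): in single-threaded
   mode the two assignments built above amount to placing

     __mf_lookup_shift_l = __mf_lc_shift;
     __mf_lookup_mask_l = __mf_lc_mask;

   on every edge leaving the entry block, so each check in the function reads
   the shift/mask from function-local temporaries instead of the globals.  */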

static void
mf_build_check_statement_for (tree base, tree limit,
                              gimple_stmt_iterator *instr_gsi,
                              location_t location, tree dirflag)
{
  gimple_stmt_iterator gsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree mf_elem, mf_base, mf_limit;
  tree cond, t, u, v;
  gimple g;
  gimple_seq seq, stmts;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
  gsi = *instr_gsi;
  gsi_prev (&gsi);
  if (! gsi_end_p (gsi))
    e = split_block (cond_bb, gsi_stmt (gsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Build our local variables.  */
  mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem");
  mf_base = make_rename_temp (mf_uintptr_type, "__mf_base");
  mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  seq = gimple_seq_alloc ();
  t = fold_convert_loc (location, mf_uintptr_type, unshare_expr (base));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_base, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = fold_convert_loc (location, mf_uintptr_type, unshare_expr (limit));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_limit, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                            & __mf_mask].  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              flag_mudflap_threads ? mf_cache_shift_decl
                                   : mf_cache_shift_decl_l);
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              flag_mudflap_threads ? mf_cache_mask_decl
                                   : mf_cache_mask_decl_l);
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_elem, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))
       {
         __mf_check ();
         ... and only if single-threaded:
         __mf_lookup_shift_l = ...;
         __mf_lookup_mask_l = ...;
       }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low > __mf_base'.  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
       1) u <-- '__mf_elem->high'
       2) v <-- '__mf_limit'.

     Then build 'u <-- (u < v)'.  */

  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TREE_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond");
  g = gimple_build_assign (cond, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
                         NULL_TREE);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))

     The lowered GIMPLE tree representing this code is in the statement
     list starting at 'head'.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /* Now build up the body of the cache-miss handling:

       __mf_check();
       refresh *_l vars.

     This is the body of the conditional.  */

  seq = gimple_seq_alloc ();
  /* u is a string, so it is already a gimple value.  */
  u = mf_file_function_line_tree (location);
  /* NB: we pass the overall [base..limit] range to mf_check.  */
  v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
                       fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                                        mf_limit, mf_base),
                       build_int_cst (mf_uintptr_type, 1));
  v = force_gimple_operand (v, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
  gimple_seq_add_stmt (&seq, g);

  if (! flag_mudflap_threads)
    {
      if (stmt_ends_bb_p (g))
        {
          gsi = gsi_start_bb (then_bb);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
          e = split_block (then_bb, g);
          then_bb = e->dest;
          seq = gimple_seq_alloc ();
        }

      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
      gimple_seq_add_stmt (&seq, g);

      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
      gimple_seq_add_stmt (&seq, g);
    }

  /* Insert the check code in the THEN block.  */
  gsi = gsi_start_bb (then_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  *instr_gsi = gsi_start_bb (join_bb);
}
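
/* For illustration only (a sketch of the emitted logic, not literal GIMPLE):
   a checked access with computed [base, limit] byte bounds expands to roughly

     __mf_base = (uintptr_t) base;
     __mf_limit = (uintptr_t) limit;
     __mf_elem = &__mf_lookup_cache[(__mf_base >> shift) & mask];
     if (__mf_elem->low > __mf_base || __mf_elem->high < __mf_limit)
       __mf_check ((void *) __mf_base, __mf_limit - __mf_base + 1,
                   dirflag, "file:line (function)");

   where shift/mask are the __mf_lc_* globals in threaded mode and their
   function-local shadows otherwise.  */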

/* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
   eligible for instrumentation.  For the mudflap1 pass, this implies
   that it should be registered with the libmudflap runtime.  For the
   mudflap2 pass this means instrumenting an indirection operation with
   respect to the object.  */
static bool
mf_decl_eligible_p (tree decl)
{
  return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
          /* The decl must have its address taken.  In the case of
             arrays, this flag is also set if the indexes are not
             compile-time known valid constants.  */
          /* XXX: not sufficient: return-by-value structs! */
          && TREE_ADDRESSABLE (decl)
          /* The type of the variable must be complete.  */
          && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
          /* The decl hasn't been decomposed somehow.  */
          && !DECL_HAS_VALUE_EXPR_P (decl));
}
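
/* Illustrative examples (not exhaustive): a local "char buf[10]" whose
   address escapes, or that is indexed by a non-constant subscript, is
   TREE_ADDRESSABLE and therefore eligible; a plain scalar local whose
   address is never taken is not, and neither is a decl that the compiler
   has replaced with a DECL_VALUE_EXPR.  */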

static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
        /* If we have a bitfield component reference, we must note the
           innermost addressable object in ELT, from which we will
           construct the byte-addressable bounds of the bitfield.  */
        tree elt = NULL_TREE;
        int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
            if (bitfield_ref_p && elt == NULL_TREE
                && (TREE_CODE (var) == ARRAY_REF
                    || TREE_CODE (var) == COMPONENT_REF))
              elt = var;

            if (TREE_CODE (var) == ARRAY_REF)
              {
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var))
              {
                base = TREE_OPERAND (var, 0);
                break;
              }
            else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
              {
                var = TREE_OPERAND (var, 0);
                if (CONSTANT_CLASS_P (var)
                    && TREE_CODE (var) != STRING_CST)
                  return;
              }
            else
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
                  {
                    base = build1 (ADDR_EXPR,
                                   build_pointer_type (TREE_TYPE (var)), var);
                    break;
                  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

            if (elt)
              elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
                            elt);
            addr = fold_convert_loc (location, ptr_type_node,
                                     elt ? elt : base);
            addr = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                                    addr,
                                    fold_convert_loc (location, sizetype,
                                                      byte_position (field)));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                                 fold_build2_loc (location, PLUS_EXPR,
                                                  mf_uintptr_type,
                                                  convert (mf_uintptr_type,
                                                           addr),
                                                  size),
                                 integer_one_node);
      }
      break;

    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                               fold_build2_loc (location,
                                                POINTER_PLUS_EXPR,
                                                ptr_type_node, base, size),
                               size_int (-1));
      break;

    case TARGET_MEM_REF:
      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                               fold_build2_loc (location,
                                                POINTER_PLUS_EXPR,
                                                ptr_type_node, base, size),
                               size_int (-1));
      break;

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
               "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above?  */
      {
        tree ofs, rem, bpu;

        /* If we're not dereferencing something, then the access
           must be ok.  */
        if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
          return;

        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
        rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
        ofs = fold_convert_loc (location,
                                sizetype,
                                size_binop_loc (location,
                                                TRUNC_DIV_EXPR, ofs, bpu));

        size = convert (bitsizetype, TREE_OPERAND (t, 1));
        size = size_binop_loc (location, PLUS_EXPR, size, rem);
        size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
        size = convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = convert (ptr_type_node, addr);
        addr = fold_build2_loc (location, POINTER_PLUS_EXPR,
                                ptr_type_node, addr, ofs);

        base = addr;
        limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
                                 fold_build2_loc (location,
                                                  POINTER_PLUS_EXPR,
                                                  ptr_type_node, base, size),
                                 size_int (-1));
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, location, dirflag);
}
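
/* Worked examples (illustrative): "p->a.b[i]" bottoms out at an INDIRECT_REF,
   so a check is emitted; a pure "a.b.c" chain on a struct variable leaves
   component_ref_only set and is not checked; "a.b[i].c" on an eligible
   (addressable) variable is checked because the ARRAY_REF clears
   component_ref_only.  */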

/* Transform
   1) Memory references.
   2) BUILTIN_ALLOCA calls.  */
static void
mf_xform_statements (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block;
  enum gimple_rhs_class grhs_class;

  bb = ENTRY_BLOCK_PTR->next_bb;
  do
    {
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple s = gsi_stmt (i);

          /* Only a few GIMPLE statements can reference memory.  */
          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
              mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
                                 gimple_location (s), integer_one_node);
              mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
                                 gimple_location (s), integer_zero_node);
              grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
              if (grhs_class == GIMPLE_BINARY_RHS)
                mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
                                   gimple_location (s), integer_zero_node);
              break;

            case GIMPLE_RETURN:
              if (gimple_return_retval (s) != NULL_TREE)
                mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
                                   gimple_location (s), integer_zero_node);
              break;

            case GIMPLE_CALL:
              {
                tree fndecl = gimple_call_fndecl (s);
                if (fndecl && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA))
                  gimple_call_set_cannot_inline (s, true);
              }
              break;

            default:
              ;
            }
        }
      bb = next;
    }
  while (bb && bb->index <= saved_last_basic_block);
}

/* ------------------------------------------------------------------------ */
/* ADDR_EXPR transforms.  Perform the declaration-related mudflap tree
   transforms on the current function.

   This is the first part of the mudflap instrumentation.  It works on
   high-level GIMPLE because after lowering, all variables are moved out
   of their BIND_EXPR binding context, and we lose liveness information
   for the declarations we wish to instrument.  */

static unsigned int
execute_mudflap_function_decls (void)
{
  struct gimplify_ctx gctx;

  /* Don't instrument functions such as the synthetic constructor
     built during mudflap_finish_file.  */
  if (mf_marked_p (current_function_decl) ||
      DECL_ARTIFICIAL (current_function_decl))
    return 0;

  push_gimplify_context (&gctx);

  mf_xform_decls (gimple_body (current_function_decl),
                  DECL_ARGUMENTS (current_function_decl));

  pop_gimplify_context (NULL);
  return 0;
}

/* This struct is passed between mf_xform_decls to store state needed
   during the traversal searching for objects that have their
   addresses taken.  */
struct mf_xform_decls_data
{
  tree param_decls;
};


/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.  */
static gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
          tree unregister_fncall_param, register_fncall_param;

          /* Variable-sized objects should have sizes already been
             gimplified when we got here.  */
          size = convert (size_type_node, TYPE_SIZE_UNIT (TREE_TYPE (decl)));
          gcc_assert (is_gimple_val (size));

          unregister_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
                                                 unregister_fncall_param,
                                                 size,
                                                 build_int_cst (NULL_TREE, 3));

          variable_name = mf_varname_tree (decl);
          register_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
                            "name") */
          register_fncall = gimple_build_call (mf_register_fndecl, 4,
                                               register_fncall_param,
                                               size,
                                               build_int_cst (NULL_TREE, 3),
                                               variable_name);

          /* Accumulate the two calls.  */
          gimple_set_location (register_fncall, location);
          gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.  */
          if (gsi_end_p (initially_stmts))
            {
              if (!DECL_ARTIFICIAL (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
                         DECL_NAME (decl));
            }
          else
            {
              gsi_insert_before (&initially_stmts, register_fncall,
                                 GSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
            }
          mf_mark (decl);
        }

      decl = TREE_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
    {
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = gimple_seq_alloc ();

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}
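
/* Illustrative sketch (not literal GIMPLE) of what the synthesis above does
   for one eligible local X inside a binding:

     __mf_register (&X, sizeof (X), __MF_TYPE_STACK, "file:line (fn) X");
     try
       {
         ... original body of the binding ...
       }
     finally
       {
         __mf_unregister (&X, sizeof (X), __MF_TYPE_STACK);
       }

   so the object is known to the runtime exactly for its lexical lifetime.  */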


/* Process every variable mentioned in BIND_EXPRs.  */
static tree
mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
                    bool *handled_operands_p ATTRIBUTE_UNUSED,
                    struct walk_stmt_info *wi)
{
  struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
        /* Process function parameters now (but only once).  */
        if (d->param_decls)
          {
            gimple_bind_set_body (stmt,
                                  mx_register_decls (d->param_decls,
                                                     gimple_bind_body (stmt),
                                                     gimple_location (stmt)));
            d->param_decls = NULL_TREE;
          }

        gimple_bind_set_body (stmt,
                              mx_register_decls (gimple_bind_vars (stmt),
                                                 gimple_bind_body (stmt),
                                                 gimple_location (stmt)));
      }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Perform the object lifetime tracking mudflap transform on the given function
   tree.  The tree is mutated in place, with possibly copied subtree nodes.

   For every auto variable declared, if its address is ever taken
   within the function, then supply its lifetime to the mudflap
   runtime with the __mf_register and __mf_unregister calls.  */
static void
mf_xform_decls (gimple_seq fnbody, tree fnparams)
{
  struct mf_xform_decls_data d;
  struct walk_stmt_info wi;
  struct pointer_set_t *pset = pointer_set_create ();

  d.param_decls = fnparams;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void*) &d;
  wi.pset = pset;
  walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
  pointer_set_destroy (pset);
}


/* ------------------------------------------------------------------------ */
/* Externally visible mudflap functions.  */


/* Mark and return the given tree node to prevent further mudflap
   transforms.  */
static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;

tree
mf_mark (tree t)
{
  void **slot;

  if (marked_trees == NULL)
    marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
                                    NULL);

  slot = htab_find_slot (marked_trees, t, INSERT);
  *slot = t;
  return t;
}

int
mf_marked_p (tree t)
{
  void *entry;

  if (marked_trees == NULL)
    return 0;

  entry = htab_find (marked_trees, t);
  return (entry != NULL);
}

/* Remember given node as a static of some kind: global data,
   function-scope static, or an anonymous constant.  Its assembler
   label is given.  */

/* A list of globals whose incomplete declarations we encountered.
   Instead of emitting the __mf_register call for them here, it's
   delayed until program finish time.  If they're still incomplete by
   then, warnings are emitted.  */
static GTY (()) VEC(tree,gc) *deferred_static_decls;

/* A list of statements for calling __mf_register() at startup time.  */
static GTY (()) tree enqueued_call_stmt_chain;

static void
mudflap_register_call (tree obj, tree object_size, tree varname)
{
  tree arg, call_stmt;

  arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
  arg = convert (ptr_type_node, arg);

  call_stmt = build_call_expr (mf_register_fndecl, 4,
                               arg,
                               convert (size_type_node, object_size),
                               /* __MF_TYPE_STATIC */
                               build_int_cst (NULL_TREE, 4),
                               varname);

  append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
}

void
mudflap_enqueue_decl (tree obj)
{
  if (mf_marked_p (obj))
    return;

  /* We don't need to process variable decls that are internally
     generated extern.  If we did, we'd end up with warnings for them
     during mudflap_finish_file ().  That would confuse the user,
     since the text would refer to variables that don't show up in the
     user's source code.  */
  if (DECL_P (obj) && DECL_EXTERNAL (obj) && DECL_ARTIFICIAL (obj))
    return;

  VEC_safe_push (tree, gc, deferred_static_decls, obj);
}


void
mudflap_enqueue_constant (tree obj)
{
  tree object_size, varname;

  if (mf_marked_p (obj))
    return;

  if (TREE_CODE (obj) == STRING_CST)
    object_size = build_int_cst (NULL_TREE, TREE_STRING_LENGTH (obj));
  else
    object_size = size_in_bytes (TREE_TYPE (obj));

  if (TREE_CODE (obj) == STRING_CST)
    varname = mf_build_string ("string literal");
  else
    varname = mf_build_string ("constant");

  mudflap_register_call (obj, object_size, varname);
}


/* Emit any file-wide instrumentation.  */
void
mudflap_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  /* No need to continue when there were errors.  */
  if (errorcount != 0 || sorrycount != 0)
    return;

  /* Insert a call to __mf_init.  */
  {
    tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
    append_to_statement_list (call2_stmt, &ctor_statements);
  }

  /* If appropriate, call __mf_set_options to pass along read-ignore mode.  */
  if (flag_mudflap_ignore_reads)
    {
      tree arg = mf_build_string ("-ignore-reads");
      tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
      append_to_statement_list (call_stmt, &ctor_statements);
    }

  /* Process all enqueued object decls.  */
  if (deferred_static_decls)
    {
      size_t i;
      tree obj;
      for (i = 0; VEC_iterate (tree, deferred_static_decls, i, obj); i++)
        {
          gcc_assert (DECL_P (obj));

          if (mf_marked_p (obj))
            continue;

          /* Omit registration for static unaddressed objects.  NB:
             Perform registration for non-static objects regardless of
             TREE_USED or TREE_ADDRESSABLE, because they may be used
             from other compilation units.  */
          if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
            continue;

          if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
            {
              warning (OPT_Wmudflap,
                       "mudflap cannot track unknown size extern %qE",
                       DECL_NAME (obj));
              continue;
            }

          mudflap_register_call (obj,
                                 size_in_bytes (TREE_TYPE (obj)),
                                 mf_varname_tree (obj));
        }

      VEC_truncate (tree, deferred_static_decls, 0);
    }

  /* Append all the enqueued registration calls.  */
  if (enqueued_call_stmt_chain)
    {
      append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
      enqueued_call_stmt_chain = NULL_TREE;
    }

  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY-1);
}


static bool
gate_mudflap (void)
{
  return flag_mudflap != 0;
}
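
/* Illustrative sketch (not literal output) of the static constructor
   synthesized by mudflap_finish_file, registered at priority
   MAX_RESERVED_INIT_PRIORITY-1:

     static void ctor (void)
     {
       __mf_init ();
       __mf_set_options ("-ignore-reads");   // only when flag_mudflap_ignore_reads
       __mf_register (&some_global, sizeof (some_global),
                      4, "file.c:10 some_global");   // 4 == __MF_TYPE_STATIC
       ... one call per deferred or enqueued static object ...
     }

   The name "ctor" and the example operands are placeholders, not the names
   the compiler actually generates.  */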

struct gimple_opt_pass pass_mudflap_1 =
{
 {
  GIMPLE_PASS,
  "mudflap1",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_decls,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_mudflap_2 =
{
 {
  GIMPLE_PASS,
  "mudflap2",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_ops,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_dump_func | TODO_update_ssa    /* todo_flags_finish */
 }
};

#include "gt-tree-mudflap.h"