/* Mudflap: narrow-pointer bounds-checking by tree rewriting.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2012
   Free Software Foundation, Inc.
   Contributed by Frank Ch. Eigler <fche@redhat.com>
   and Graydon Hoare <graydon@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "tm_p.h"
30 #include "basic-block.h"
31 #include "flags.h"
32 #include "function.h"
33 #include "tree-inline.h"
34 #include "gimple.h"
35 #include "tree-iterator.h"
36 #include "tree-flow.h"
37 #include "tree-mudflap.h"
38 #include "tree-dump.h"
39 #include "tree-pass.h"
40 #include "hashtab.h"
41 #include "diagnostic.h"
42 #include "demangle.h"
43 #include "langhooks.h"
44 #include "ggc.h"
45 #include "cgraph.h"
46 #include "gimple.h"
48 extern void add_bb_to_loop (basic_block, struct loop *);
/* Internal function decls */


/* Options.  */
#define flag_mudflap_threads (flag_mudflap == 2)

/* Helpers.  */
static tree mf_build_string (const char *string);
static tree mf_varname_tree (tree);
static tree mf_file_function_line_tree (location_t);

/* Indirection-related instrumentation.  */
static void mf_decl_cache_locals (void);
static void mf_decl_clear_locals (void);
static void mf_xform_statements (void);
static unsigned int execute_mudflap_function_ops (void);

/* Addressable variables instrumentation.  */
static void mf_xform_decls (gimple_seq, tree);
static tree mx_xfn_xform_decls (gimple_stmt_iterator *, bool *,
                                struct walk_stmt_info *);
static gimple_seq mx_register_decls (tree, gimple_seq, location_t);
static unsigned int execute_mudflap_function_decls (void);
/* Return true if DECL is an artificial stub that shouldn't be instrumented
   by mf.  We should instrument clones of non-artificial functions.  */
static inline bool
mf_artificial (const_tree decl)
{
  return DECL_ARTIFICIAL (DECL_ORIGIN (decl));
}
/* ------------------------------------------------------------------------ */
/* Some generally helpful functions for mudflap instrumentation.  */

/* Build a reference to a literal string.  */
static tree
mf_build_string (const char *string)
{
  size_t len = strlen (string);
  tree result = mf_mark (build_string (len + 1, string));

  TREE_TYPE (result) = build_array_type
    (char_type_node, build_index_type (size_int (len)));
  TREE_CONSTANT (result) = 1;
  TREE_READONLY (result) = 1;
  TREE_STATIC (result) = 1;

  result = build1 (ADDR_EXPR, build_pointer_type (char_type_node), result);

  return mf_mark (result);
}
/* Create a properly typed STRING_CST node that describes the given
   declaration.  It will be used as an argument for __mf_register().
   Try to construct a helpful string, including file/function/variable
   name.  */

static tree
mf_varname_tree (tree decl)
{
  static pretty_printer buf_rec;
  static int initialized = 0;
  pretty_printer *buf = & buf_rec;
  const char *buf_contents;
  tree result;

  gcc_assert (decl);

  if (!initialized)
    {
      pp_construct (buf, /* prefix */ NULL, /* line-width */ 0);
      initialized = 1;
    }
  pp_clear_output_area (buf);

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  {
    expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (decl));
    const char *sourcefile;
    unsigned sourceline = xloc.line;
    unsigned sourcecolumn = 0;
    sourcecolumn = xloc.column;
    sourcefile = xloc.file;
    if (sourcefile == NULL && current_function_decl != NULL_TREE)
      sourcefile = DECL_SOURCE_FILE (current_function_decl);
    if (sourcefile == NULL)
      sourcefile = "<unknown file>";

    pp_string (buf, sourcefile);

    if (sourceline != 0)
      {
        pp_string (buf, ":");
        pp_decimal_int (buf, sourceline);

        if (sourcecolumn != 0)
          {
            pp_string (buf, ":");
            pp_decimal_int (buf, sourcecolumn);
          }
      }
  }

  if (current_function_decl != NULL_TREE)
    {
      /* Add (FUNCTION) */
      pp_string (buf, " (");
      {
        const char *funcname = NULL;
        if (DECL_NAME (current_function_decl))
          funcname = lang_hooks.decl_printable_name (current_function_decl, 1);
        if (funcname == NULL)
          funcname = "anonymous fn";

        pp_string (buf, funcname);
      }
      pp_string (buf, ") ");
    }
  else
    pp_string (buf, " ");

  /* Add <variable-declaration>, possibly demangled.  */
  {
    const char *declname = NULL;

    if (DECL_NAME (decl) != NULL)
      {
        if (strcmp ("GNU C++", lang_hooks.name) == 0)
          {
            /* The gcc/cp decl_printable_name hook doesn't do as good a job as
               the libiberty demangler.  */
            declname = cplus_demangle (IDENTIFIER_POINTER (DECL_NAME (decl)),
                                       DMGL_AUTO | DMGL_VERBOSE);
          }
        if (declname == NULL)
          declname = lang_hooks.decl_printable_name (decl, 3);
      }

    if (declname == NULL)
      declname = "<unnamed variable>";

    pp_string (buf, declname);
  }

  /* Return the lot as a new STRING_CST.  */
  buf_contents = pp_base_formatted_text (buf);
  result = mf_build_string (buf_contents);
  pp_clear_output_area (buf);

  return result;
}
/* And another friend, for producing a simpler message.  */

static tree
mf_file_function_line_tree (location_t location)
{
  expanded_location xloc = expand_location (location);
  const char *file = NULL, *colon, *line, *op, *name, *cp;
  char linecolbuf[30]; /* Enough for two decimal numbers plus a colon.  */
  char *string;
  tree result;

  /* Add FILENAME[:LINENUMBER[:COLUMNNUMBER]].  */
  file = xloc.file;
  if (file == NULL && current_function_decl != NULL_TREE)
    file = DECL_SOURCE_FILE (current_function_decl);
  if (file == NULL)
    file = "<unknown file>";

  if (xloc.line > 0)
    {
      if (xloc.column > 0)
        sprintf (linecolbuf, "%d:%d", xloc.line, xloc.column);
      else
        sprintf (linecolbuf, "%d", xloc.line);
      colon = ":";
      line = linecolbuf;
    }
  else
    colon = line = "";

  /* Add (FUNCTION).  */
  name = lang_hooks.decl_printable_name (current_function_decl, 1);
  if (name)
    {
      op = " (";
      cp = ")";
    }
  else
    op = name = cp = "";

  string = concat (file, colon, line, op, name, cp, NULL);
  result = mf_build_string (string);
  free (string);

  return result;
}
/* global tree nodes */

/* Global tree objects for global variables and functions exported by
   mudflap runtime library.  mf_init_extern_trees must be called
   before using these.  */

/* uintptr_t (usually "unsigned long") */
static GTY (()) tree mf_uintptr_type;

/* struct __mf_cache { uintptr_t low; uintptr_t high; }; */
static GTY (()) tree mf_cache_struct_type;

/* struct __mf_cache * const */
static GTY (()) tree mf_cache_structptr_type;

/* extern struct __mf_cache __mf_lookup_cache []; */
static GTY (()) tree mf_cache_array_decl;

/* extern unsigned char __mf_lc_shift; */
static GTY (()) tree mf_cache_shift_decl;

/* extern uintptr_t __mf_lc_mask; */
static GTY (()) tree mf_cache_mask_decl;

/* Their function-scope local shadows, used in single-threaded mode only.  */

/* auto const unsigned char __mf_lc_shift_l; */
static GTY (()) tree mf_cache_shift_decl_l;

/* auto const uintptr_t __mf_lc_mask_l; */
static GTY (()) tree mf_cache_mask_decl_l;

/* extern void __mf_check (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_check_fndecl;

/* extern void __mf_register (void *ptr, size_t sz, int type, const char *); */
static GTY (()) tree mf_register_fndecl;

/* extern void __mf_unregister (void *ptr, size_t sz, int type); */
static GTY (()) tree mf_unregister_fndecl;

/* extern void __mf_init (); */
static GTY (()) tree mf_init_fndecl;

/* extern int __mf_set_options (const char*); */
static GTY (()) tree mf_set_options_fndecl;
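
/* An editorial summary, not part of the original file: taken together, the
   decls above mirror this slice of libmudflap's mf-runtime.h interface, as
   documented in the per-node comments:

     struct __mf_cache { uintptr_t low; uintptr_t high; };
     extern struct __mf_cache __mf_lookup_cache [];
     extern unsigned char __mf_lc_shift;
     extern uintptr_t __mf_lc_mask;
     extern void __mf_check (void *ptr, size_t sz, int type, const char *loc);
     extern void __mf_register (void *ptr, size_t sz, int type, const char *name);
     extern void __mf_unregister (void *ptr, size_t sz, int type);
     extern void __mf_init (void);
     extern int __mf_set_options (const char *opts);

   The parameter names loc, name and opts are illustrative only.  */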
/* Helper for mudflap_init: construct a decl with the given category,
   name, and type, mark it an external reference, and pushdecl it.  */
static inline tree
mf_make_builtin (enum tree_code category, const char *name, tree type)
{
  tree decl = mf_mark (build_decl (UNKNOWN_LOCATION,
                                   category, get_identifier (name), type));
  TREE_PUBLIC (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  lang_hooks.decls.pushdecl (decl);
  /* The decl was declared by the compiler.  */
  DECL_ARTIFICIAL (decl) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (decl) = 1;
  return decl;
}
/* Helper for mudflap_init: construct a tree corresponding to the type
     struct __mf_cache { uintptr_t low; uintptr_t high; };
   where uintptr_t is the FIELD_TYPE argument.  */
static inline tree
mf_make_mf_cache_struct_type (tree field_type)
{
  /* There is, abominably, no language-independent way to construct a
     RECORD_TYPE.  So we have to call the basic type construction
     primitives by hand.  */
  tree fieldlo = build_decl (UNKNOWN_LOCATION,
                             FIELD_DECL, get_identifier ("low"), field_type);
  tree fieldhi = build_decl (UNKNOWN_LOCATION,
                             FIELD_DECL, get_identifier ("high"), field_type);

  tree struct_type = make_node (RECORD_TYPE);
  DECL_CONTEXT (fieldlo) = struct_type;
  DECL_CONTEXT (fieldhi) = struct_type;
  DECL_CHAIN (fieldlo) = fieldhi;
  TYPE_FIELDS (struct_type) = fieldlo;
  TYPE_NAME (struct_type) = get_identifier ("__mf_cache");
  layout_type (struct_type);

  return struct_type;
}
/* Initialize the global tree nodes that correspond to mf-runtime.h
   declarations.  */
void
mudflap_init (void)
{
  static bool done = false;
  tree mf_const_string_type;
  tree mf_cache_array_type;
  tree mf_check_register_fntype;
  tree mf_unregister_fntype;
  tree mf_init_fntype;
  tree mf_set_options_fntype;

  if (done)
    return;
  done = true;

  mf_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode,
                                                    /*unsignedp=*/true);
  mf_const_string_type
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));

  mf_cache_struct_type = mf_make_mf_cache_struct_type (mf_uintptr_type);
  mf_cache_structptr_type = build_pointer_type (mf_cache_struct_type);
  mf_cache_array_type = build_array_type (mf_cache_struct_type, 0);
  mf_check_register_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, mf_const_string_type,
                              NULL_TREE);
  mf_unregister_fntype =
    build_function_type_list (void_type_node, ptr_type_node, size_type_node,
                              integer_type_node, NULL_TREE);
  mf_init_fntype =
    build_function_type_list (void_type_node, NULL_TREE);
  mf_set_options_fntype =
    build_function_type_list (integer_type_node, mf_const_string_type,
                              NULL_TREE);

  mf_cache_array_decl = mf_make_builtin (VAR_DECL, "__mf_lookup_cache",
                                         mf_cache_array_type);
  mf_cache_shift_decl = mf_make_builtin (VAR_DECL, "__mf_lc_shift",
                                         unsigned_char_type_node);
  mf_cache_mask_decl = mf_make_builtin (VAR_DECL, "__mf_lc_mask",
                                        mf_uintptr_type);
  /* Don't process these in mudflap_enqueue_decl, should they come by
     there for some reason.  */
  mf_mark (mf_cache_array_decl);
  mf_mark (mf_cache_shift_decl);
  mf_mark (mf_cache_mask_decl);
  mf_check_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_check",
                                     mf_check_register_fntype);
  mf_register_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_register",
                                        mf_check_register_fntype);
  mf_unregister_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_unregister",
                                          mf_unregister_fntype);
  mf_init_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_init",
                                    mf_init_fntype);
  mf_set_options_fndecl = mf_make_builtin (FUNCTION_DECL, "__mf_set_options",
                                           mf_set_options_fntype);
}
/* ------------------------------------------------------------------------ */
/* This is the second part of the mudflap instrumentation.  It works on
   low-level GIMPLE using the CFG, because we want to run this pass after
   tree optimizations have been performed, but we have to preserve the CFG
   for expansion from trees to RTL.
   Below is the list of transformations performed on statements in the
   current function.

 1) Memory reference transforms: Perform the mudflap indirection-related
    tree transforms on memory references.

 2) Mark BUILTIN_ALLOCA calls not inlineable.

 */

static unsigned int
execute_mudflap_function_ops (void)
{
  struct gimplify_ctx gctx;

  /* Don't instrument functions such as the synthetic constructor
     built during mudflap_finish_file.  */
  if (mf_marked_p (current_function_decl)
      || mf_artificial (current_function_decl))
    return 0;

  push_gimplify_context (&gctx);

  add_referenced_var (mf_cache_array_decl);
  add_referenced_var (mf_cache_shift_decl);
  add_referenced_var (mf_cache_mask_decl);

  /* In multithreaded mode, don't cache the lookup cache parameters.  */
  if (! flag_mudflap_threads)
    mf_decl_cache_locals ();

  mf_xform_statements ();

  if (! flag_mudflap_threads)
    mf_decl_clear_locals ();

  pop_gimplify_context (NULL);
  return 0;
}
/* Insert a gimple_seq SEQ on all the outgoing edges out of BB.  Note that
   if BB has more than one edge, SEQ will be replicated for each edge.
   Also, abnormal edges will be ignored.  */

static void
insert_edge_copies_seq (gimple_seq seq, basic_block bb)
{
  edge e;
  edge_iterator ei;
  unsigned n_copies = -1;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      n_copies++;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_ABNORMAL))
      gsi_insert_seq_on_edge (e, n_copies-- > 0 ? gimple_seq_copy (seq) : seq);
}
/* Create and initialize local shadow variables for the lookup cache
   globals.  Put their decls in the *_l globals for use by
   mf_build_check_statement_for.  */

static void
mf_decl_cache_locals (void)
{
  gimple g;
  gimple_seq seq = gimple_seq_alloc ();

  /* Build the cache vars.  */
  mf_cache_shift_decl_l
    = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_shift_decl),
                                 "__mf_lookup_shift_l"));

  mf_cache_mask_decl_l
    = mf_mark (make_rename_temp (TREE_TYPE (mf_cache_mask_decl),
                                 "__mf_lookup_mask_l"));

  /* Build initialization nodes for the cache vars.  We just load the
     globals into the cache variables.  */
  g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
  gimple_set_location (g, DECL_SOURCE_LOCATION (current_function_decl));
  gimple_seq_add_stmt (&seq, g);

  insert_edge_copies_seq (seq, ENTRY_BLOCK_PTR);

  gsi_commit_edge_inserts ();
}
static void
mf_decl_clear_locals (void)
{
  /* Unset local shadows.  */
  mf_cache_shift_decl_l = NULL_TREE;
  mf_cache_mask_decl_l = NULL_TREE;
}
static void
mf_build_check_statement_for (tree base, tree limit,
                              gimple_stmt_iterator *instr_gsi,
                              location_t location, tree dirflag)
{
  gimple_stmt_iterator gsi;
  basic_block cond_bb, then_bb, join_bb;
  edge e;
  tree cond, t, u, v;
  tree mf_base;
  tree mf_elem;
  tree mf_limit;
  gimple g;
  gimple_seq seq, stmts;

  /* We first need to split the current basic block, and start altering
     the CFG.  This allows us to insert the statements we're about to
     construct into the right basic blocks.  */

  cond_bb = gimple_bb (gsi_stmt (*instr_gsi));
  gsi = *instr_gsi;
  gsi_prev (&gsi);
  if (! gsi_end_p (gsi))
    e = split_block (cond_bb, gsi_stmt (gsi));
  else
    e = split_block_after_labels (cond_bb);
  cond_bb = e->src;
  join_bb = e->dest;

  /* A recap at this point: join_bb is the basic block at whose head
     is the gimple statement for which this check expression is being
     built.  cond_bb is the (possibly new, synthetic) basic block the
     end of which will contain the cache-lookup code, and a
     conditional that jumps to the cache-miss code or, much more
     likely, over to join_bb.  */

  /* Create the bb that contains the cache-miss fallback block (mf_check).  */
  then_bb = create_empty_bb (cond_bb);
  make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  make_single_succ_edge (then_bb, join_bb, EDGE_FALLTHRU);

  /* Mark the pseudo-fallthrough edge from cond_bb to join_bb.  */
  e = find_edge (cond_bb, join_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = REG_BR_PROB_BASE;

  /* Update dominance info.  Note that bb_join's data was
     updated by split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
      set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (then_bb, cond_bb->loop_father);

  /* Build our local variables.  */
  mf_elem = make_rename_temp (mf_cache_structptr_type, "__mf_elem");
  mf_base = make_rename_temp (mf_uintptr_type, "__mf_base");
  mf_limit = make_rename_temp (mf_uintptr_type, "__mf_limit");

  /* Build: __mf_base = (uintptr_t) <base address expression>.  */
  seq = gimple_seq_alloc ();
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (base));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_base, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_limit = (uintptr_t) <limit address expression>.  */
  t = fold_convert_loc (location, mf_uintptr_type,
                        unshare_expr (limit));
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_limit, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build: __mf_elem = &__mf_lookup_cache [(__mf_base >> __mf_shift)
                                             & __mf_mask].  */
  t = build2 (RSHIFT_EXPR, mf_uintptr_type, mf_base,
              flag_mudflap_threads ? mf_cache_shift_decl
               : mf_cache_shift_decl_l);
  t = build2 (BIT_AND_EXPR, mf_uintptr_type, t,
              flag_mudflap_threads ? mf_cache_mask_decl
               : mf_cache_mask_decl_l);
  t = build4 (ARRAY_REF,
              TREE_TYPE (TREE_TYPE (mf_cache_array_decl)),
              mf_cache_array_decl, t, NULL_TREE, NULL_TREE);
  t = build1 (ADDR_EXPR, mf_cache_structptr_type, t);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_assign (mf_elem, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);
  /* Quick validity check.

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))
        {
          __mf_check ();
          ... and only if single-threaded:
          __mf_lookup_shift_l = ...;
          __mf_lookup_mask_l = ...;
        }

     It is expected that this body of code is rarely executed so we mark
     the edge to the THEN clause of the conditional jump as unlikely.  */

  /* Construct t <-- '__mf_elem->low > __mf_base'.  */
  t = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              TYPE_FIELDS (mf_cache_struct_type), NULL_TREE);
  t = build2 (GT_EXPR, boolean_type_node, t, mf_base);

  /* Construct '__mf_elem->high < __mf_limit'.

     First build:
        1) u <--  '__mf_elem->high'
        2) v <--  '__mf_limit'.

     Then build 'u <-- (u < v)'.  */

  u = build3 (COMPONENT_REF, mf_uintptr_type,
              build1 (INDIRECT_REF, mf_cache_struct_type, mf_elem),
              DECL_CHAIN (TYPE_FIELDS (mf_cache_struct_type)), NULL_TREE);

  v = mf_limit;

  u = build2 (LT_EXPR, boolean_type_node, u, v);

  /* Build the composed conditional: t <-- 't || u'.  Then store the
     result of the evaluation of 't' in a temporary variable which we
     can use as the condition for the conditional jump.  */
  t = build2 (TRUTH_OR_EXPR, boolean_type_node, t, u);
  t = force_gimple_operand (t, &stmts, false, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  cond = make_rename_temp (boolean_type_node, "__mf_unlikely_cond");
  g = gimple_build_assign (cond, t);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* Build the conditional jump.  'cond' is just a temporary so we can
     simply build a void COND_EXPR.  We do need labels in both arms though.  */
  g = gimple_build_cond (NE_EXPR, cond, boolean_false_node, NULL_TREE,
                         NULL_TREE);
  gimple_set_location (g, location);
  gimple_seq_add_stmt (&seq, g);

  /* At this point, after so much hard work, we have only constructed
     the conditional jump,

     if (__mf_elem->low > __mf_base
         || (__mf_elem->high < __mf_limit))

     The lowered GIMPLE representation of this code is in the statement
     sequence SEQ built above.

     We can insert this now in the current basic block, i.e. the one that
     the statement we're instrumenting was originally in.  */
  gsi = gsi_last_bb (cond_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  /*  Now build up the body of the cache-miss handling:

     __mf_check();
     refresh *_l vars.

     This is the body of the conditional.  */

  seq = gimple_seq_alloc ();
  /* u is a string, so it is already a gimple value.  */
  u = mf_file_function_line_tree (location);
  /* NB: we pass the overall [base..limit] range to mf_check.  */
  v = fold_build2_loc (location, PLUS_EXPR, mf_uintptr_type,
                       fold_build2_loc (location,
                                        MINUS_EXPR, mf_uintptr_type,
                                        mf_limit, mf_base),
                       build_int_cst (mf_uintptr_type, 1));
  v = force_gimple_operand (v, &stmts, true, NULL_TREE);
  gimple_seq_add_seq (&seq, stmts);
  g = gimple_build_call (mf_check_fndecl, 4, mf_base, v, dirflag, u);
  gimple_seq_add_stmt (&seq, g);

  if (! flag_mudflap_threads)
    {
      if (stmt_ends_bb_p (g))
        {
          gsi = gsi_start_bb (then_bb);
          gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
          e = split_block (then_bb, g);
          then_bb = e->dest;
          seq = gimple_seq_alloc ();
        }

      g = gimple_build_assign (mf_cache_shift_decl_l, mf_cache_shift_decl);
      gimple_seq_add_stmt (&seq, g);

      g = gimple_build_assign (mf_cache_mask_decl_l, mf_cache_mask_decl);
      gimple_seq_add_stmt (&seq, g);
    }

  /* Insert the check code in the THEN block.  */
  gsi = gsi_start_bb (then_bb);
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  *instr_gsi = gsi_start_bb (join_bb);
}
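
/* An editorial sketch (not part of the original file) of what the sequence
   built above amounts to, written as C source; BASE, LIMIT and DIRFLAG stand
   for the corresponding arguments of mf_build_check_statement_for:

     uintptr_t __mf_base  = (uintptr_t) BASE;
     uintptr_t __mf_limit = (uintptr_t) LIMIT;
     struct __mf_cache *__mf_elem =
       &__mf_lookup_cache[(__mf_base >> __mf_lc_shift) & __mf_lc_mask];
     if (__mf_elem->low > __mf_base || __mf_elem->high < __mf_limit)
       __mf_check ((void *) __mf_base, __mf_limit - __mf_base + 1,
                   DIRFLAG, "file:line (function)");

   In single-threaded mode the shift and mask are read from the *_l shadow
   locals instead of the globals, and the rarely-taken cache-miss branch also
   refreshes those shadows.  */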
/* Check whether the given decl, generally a VAR_DECL or PARM_DECL, is
   eligible for instrumentation.  For the mudflap1 pass, this implies
   that it should be registered with the libmudflap runtime.  For the
   mudflap2 pass this means instrumenting an indirection operation with
   respect to the object.
*/
static int
mf_decl_eligible_p (tree decl)
{
  return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
          /* The decl must have its address taken.  In the case of
             arrays, this flag is also set if the indexes are not
             compile-time known valid constants.  */
          /* XXX: not sufficient: return-by-value structs! */
          && TREE_ADDRESSABLE (decl)
          /* The type of the variable must be complete.  */
          && COMPLETE_OR_VOID_TYPE_P (TREE_TYPE (decl))
          /* The decl hasn't been decomposed somehow.  */
          && !DECL_HAS_VALUE_EXPR_P (decl));
}
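
/* An editorial illustration, not from the original sources: given the usual
   front-end handling of TREE_ADDRESSABLE, a plain "int a;" is not eligible
   until its address is taken ("int *p = &a;"); an array "int b[10];" becomes
   eligible once it is indexed by something other than a known-valid constant
   ("b[i] = 0;"); and a decl whose type is incomplete fails the
   COMPLETE_OR_VOID_TYPE_P test above.  */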
static void
mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                   location_t location, tree dirflag)
{
  tree type, base, limit, addr, size, t;

  /* Don't instrument read operations.  */
  if (dirflag == integer_zero_node && flag_mudflap_ignore_reads)
    return;

  /* Don't instrument marked nodes.  */
  if (mf_marked_p (*tp))
    return;

  t = *tp;
  type = TREE_TYPE (t);

  if (type == error_mark_node)
    return;

  size = TYPE_SIZE_UNIT (type);

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
      {
        /* This is trickier than it may first appear.  The reason is
           that we are looking at expressions from the "inside out" at
           this point.  We may have a complex nested aggregate/array
           expression (e.g. "a.b[i].c"), maybe with an indirection as
           the leftmost operator ("p->a.b.d"), where instrumentation
           is necessary.  Or we may have an innocent "a.b.c"
           expression that must not be instrumented.  We need to
           recurse all the way down the nesting structure to figure it
           out: looking just at the outer node is not enough.  */
        tree var;
        int component_ref_only = (TREE_CODE (t) == COMPONENT_REF);
        /* If we have a bitfield component reference, we must note the
           innermost addressable object in ELT, from which we will
           construct the byte-addressable bounds of the bitfield.  */
        tree elt = NULL_TREE;
        int bitfield_ref_p = (TREE_CODE (t) == COMPONENT_REF
                              && DECL_BIT_FIELD_TYPE (TREE_OPERAND (t, 1)));

        /* Iterate to the top of the ARRAY_REF/COMPONENT_REF
           containment hierarchy to find the outermost VAR_DECL.  */
        var = TREE_OPERAND (t, 0);
        while (1)
          {
            if (bitfield_ref_p && elt == NULL_TREE
                && (TREE_CODE (var) == ARRAY_REF
                    || TREE_CODE (var) == COMPONENT_REF))
              elt = var;

            if (TREE_CODE (var) == ARRAY_REF)
              {
                component_ref_only = 0;
                var = TREE_OPERAND (var, 0);
              }
            else if (TREE_CODE (var) == COMPONENT_REF)
              var = TREE_OPERAND (var, 0);
            else if (INDIRECT_REF_P (var)
                     || TREE_CODE (var) == MEM_REF)
              {
                base = TREE_OPERAND (var, 0);
                break;
              }
            else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
              {
                var = TREE_OPERAND (var, 0);
                if (CONSTANT_CLASS_P (var)
                    && TREE_CODE (var) != STRING_CST)
                  return;
              }
            else
              {
                gcc_assert (TREE_CODE (var) == VAR_DECL
                            || TREE_CODE (var) == PARM_DECL
                            || TREE_CODE (var) == RESULT_DECL
                            || TREE_CODE (var) == STRING_CST);
                /* Don't instrument this access if the underlying
                   variable is not "eligible".  This test matches
                   those arrays that have only known-valid indexes,
                   and thus are not labeled TREE_ADDRESSABLE.  */
                if (! mf_decl_eligible_p (var) || component_ref_only)
                  return;
                else
                  {
                    base = build1 (ADDR_EXPR,
                                   build_pointer_type (TREE_TYPE (var)), var);
                    break;
                  }
              }
          }

        /* Handle the case of ordinary non-indirection structure
           accesses.  These have only nested COMPONENT_REF nodes (no
           INDIRECT_REF), but pass through the above filter loop.
           Note that it's possible for such a struct variable to match
           the eligible_p test because someone else might take its
           address sometime.  */

        /* We need special processing for bitfield components, because
           their addresses cannot be taken.  */
        if (bitfield_ref_p)
          {
            tree field = TREE_OPERAND (t, 1);

            if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
              size = DECL_SIZE_UNIT (field);

            if (elt)
              elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
                            elt);
            addr = fold_convert_loc (location, ptr_type_node, elt ? elt : base);
            addr = fold_build_pointer_plus_loc (location,
                                                addr, byte_position (field));
          }
        else
          addr = build1 (ADDR_EXPR, build_pointer_type (type), t);

        limit = fold_build2_loc (location, MINUS_EXPR, mf_uintptr_type,
                                 fold_build2_loc (location, PLUS_EXPR,
                                                  mf_uintptr_type,
                                                  fold_convert (mf_uintptr_type,
                                                                addr),
                                                  size),
                                 integer_one_node);
      }
      break;
    case INDIRECT_REF:
      addr = TREE_OPERAND (t, 0);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc
        (location, fold_build_pointer_plus_loc (location, base, size), -1);
      break;

    case MEM_REF:
      addr = fold_build_pointer_plus_loc (location, TREE_OPERAND (t, 0),
                                          TREE_OPERAND (t, 1));
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc (location,
                                               fold_build_pointer_plus_loc
                                                 (location, base, size), -1);
      break;

    case TARGET_MEM_REF:
      addr = tree_mem_ref_addr (ptr_type_node, t);
      base = addr;
      limit = fold_build_pointer_plus_hwi_loc (location,
                                               fold_build_pointer_plus_loc
                                                 (location, base, size), -1);
      break;

    case ARRAY_RANGE_REF:
      warning (OPT_Wmudflap,
               "mudflap checking not yet implemented for ARRAY_RANGE_REF");
      return;

    case BIT_FIELD_REF:
      /* ??? merge with COMPONENT_REF code above? */
      {
        tree ofs, rem, bpu;

        /* If we're not dereferencing something, then the access
           must be ok.  */
        if (TREE_CODE (TREE_OPERAND (t, 0)) != INDIRECT_REF)
          return;

        bpu = bitsize_int (BITS_PER_UNIT);
        ofs = fold_convert (bitsizetype, TREE_OPERAND (t, 2));
        rem = size_binop_loc (location, TRUNC_MOD_EXPR, ofs, bpu);
        ofs = size_binop_loc (location, TRUNC_DIV_EXPR, ofs, bpu);

        size = fold_convert (bitsizetype, TREE_OPERAND (t, 1));
        size = size_binop_loc (location, PLUS_EXPR, size, rem);
        size = size_binop_loc (location, CEIL_DIV_EXPR, size, bpu);
        size = fold_convert (sizetype, size);

        addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
        addr = fold_convert (ptr_type_node, addr);
        addr = fold_build_pointer_plus_loc (location, addr, ofs);

        base = addr;
        limit = fold_build_pointer_plus_hwi_loc (location,
                                                 fold_build_pointer_plus_loc
                                                   (location, base, size), -1);
      }
      break;

    default:
      return;
    }

  mf_build_check_statement_for (base, limit, iter, location, dirflag);
}
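
/* Editorial note, not from the original file: every case above computes BASE
   as the address of the first byte accessed and LIMIT as the address of the
   last byte, i.e. limit = base + size - 1.  For a 4-byte dereference "*p",
   base is p and limit is p + 3; that [base..limit] range is what
   mf_build_check_statement_for validates against the lookup cache.  */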
/* Transform
   1) Memory references.
*/
static void
mf_xform_statements (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block;
  enum gimple_rhs_class grhs_class;

  bb = ENTRY_BLOCK_PTR ->next_bb;
  do
    {
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple s = gsi_stmt (i);

          /* Only a few GIMPLE statements can reference memory.  */
          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
              mf_xform_derefs_1 (&i, gimple_assign_lhs_ptr (s),
                                 gimple_location (s), integer_one_node);
              mf_xform_derefs_1 (&i, gimple_assign_rhs1_ptr (s),
                                 gimple_location (s), integer_zero_node);
              grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
              if (grhs_class == GIMPLE_BINARY_RHS)
                mf_xform_derefs_1 (&i, gimple_assign_rhs2_ptr (s),
                                   gimple_location (s), integer_zero_node);
              break;

            case GIMPLE_RETURN:
              if (gimple_return_retval (s) != NULL_TREE)
                {
                  mf_xform_derefs_1 (&i, gimple_return_retval_ptr (s),
                                     gimple_location (s),
                                     integer_zero_node);
                }
              break;

            default:
              ;
            }
        }
      bb = next;
    }
  while (bb && bb->index <= saved_last_basic_block);
}
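
/* Editorial example (not from the original file): for a gimple assignment
   "*p = t1" the LHS dereference is checked with dirflag integer_one_node (a
   write), while for "t2 = *q" the RHS dereference is checked with dirflag
   integer_zero_node (a read); read checks are dropped entirely by
   mf_xform_derefs_1 when flag_mudflap_ignore_reads is set.  */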
/* ------------------------------------------------------------------------ */
/* ADDR_EXPR transforms.  Perform the declaration-related mudflap tree
   transforms on the current function.

   This is the first part of the mudflap instrumentation.  It works on
   high-level GIMPLE because after lowering, all variables are moved out
   of their BIND_EXPR binding context, and we lose liveness information
   for the declarations we wish to instrument.  */

static unsigned int
execute_mudflap_function_decls (void)
{
  struct gimplify_ctx gctx;

  /* Don't instrument functions such as the synthetic constructor
     built during mudflap_finish_file.  */
  if (mf_marked_p (current_function_decl)
      || mf_artificial (current_function_decl))
    return 0;

  push_gimplify_context (&gctx);

  mf_xform_decls (gimple_body (current_function_decl),
                  DECL_ARGUMENTS (current_function_decl));

  pop_gimplify_context (NULL);
  return 0;
}
/* This struct is passed between mf_xform_decls and mx_xfn_xform_decls to
   store state needed during the traversal searching for objects that have
   their addresses taken.  */
struct mf_xform_decls_data
{
  tree param_decls;
};
/* Synthesize a CALL_EXPR and a TRY_FINALLY_EXPR, for this chain of
   _DECLs if appropriate.  Arrange to call the __mf_register function
   now, and the __mf_unregister function later for each.  Return the
   gimple sequence after synthesis.  */
gimple_seq
mx_register_decls (tree decl, gimple_seq seq, location_t location)
{
  gimple_seq finally_stmts = NULL;
  gimple_stmt_iterator initially_stmts = gsi_start (seq);

  while (decl != NULL_TREE)
    {
      if (mf_decl_eligible_p (decl)
          /* Not already processed.  */
          && ! mf_marked_p (decl)
          /* Automatic variable.  */
          && ! DECL_EXTERNAL (decl)
          && ! TREE_STATIC (decl))
        {
          tree size = NULL_TREE, variable_name;
          gimple unregister_fncall, register_fncall;
          tree unregister_fncall_param, register_fncall_param;

          /* Variable-sized objects should have had their sizes gimplified
             by the time we get here.  */
          size = fold_convert (size_type_node,
                               TYPE_SIZE_UNIT (TREE_TYPE (decl)));
          gcc_assert (is_gimple_val (size));


          unregister_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_unregister (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK) */
          unregister_fncall = gimple_build_call (mf_unregister_fndecl, 3,
                                                 unregister_fncall_param,
                                                 size,
                                                 integer_three_node);


          variable_name = mf_varname_tree (decl);
          register_fncall_param =
            mf_mark (build1 (ADDR_EXPR,
                             build_pointer_type (TREE_TYPE (decl)),
                             decl));
          /* __mf_register (&VARIABLE, sizeof (VARIABLE), __MF_TYPE_STACK,
                            "name") */
          register_fncall = gimple_build_call (mf_register_fndecl, 4,
                                               register_fncall_param,
                                               size,
                                               integer_three_node,
                                               variable_name);


          /* Accumulate the two calls.  */
          gimple_set_location (register_fncall, location);
          gimple_set_location (unregister_fncall, location);

          /* Add the __mf_register call at the current appending point.  */
          if (gsi_end_p (initially_stmts))
            {
              if (!mf_artificial (decl))
                warning (OPT_Wmudflap,
                         "mudflap cannot track %qE in stub function",
                         DECL_NAME (decl));
            }
          else
            {
              gsi_insert_before (&initially_stmts, register_fncall,
                                 GSI_SAME_STMT);

              /* Accumulate the FINALLY piece.  */
              gimple_seq_add_stmt (&finally_stmts, unregister_fncall);
            }
          mf_mark (decl);
        }

      decl = DECL_CHAIN (decl);
    }

  /* Actually, (initially_stmts!=NULL) <=> (finally_stmts!=NULL) */
  if (finally_stmts != NULL)
    {
      gimple stmt = gimple_build_try (seq, finally_stmts, GIMPLE_TRY_FINALLY);
      gimple_seq new_seq = gimple_seq_alloc ();

      gimple_seq_add_stmt (&new_seq, stmt);
      return new_seq;
    }
  else
    return seq;
}
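
/* An editorial sketch (not part of the original file) of the effect of
   mx_register_decls on a binding block that declares one eligible automatic
   variable V, written as C-like pseudo source:

     {
       __mf_register (&V, sizeof (V), __MF_TYPE_STACK, "file:line (fn) V");
       try
         {
           ... original block body ...
         }
       finally
         {
           __mf_unregister (&V, sizeof (V), __MF_TYPE_STACK);
         }
     }

   Here __MF_TYPE_STACK stands for the integer_three_node argument passed to
   both runtime calls above.  */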
/* Process every variable mentioned in BIND_EXPRs.  */
static tree
mx_xfn_xform_decls (gimple_stmt_iterator *gsi,
                    bool *handled_operands_p ATTRIBUTE_UNUSED,
                    struct walk_stmt_info *wi)
{
  struct mf_xform_decls_data *d = (struct mf_xform_decls_data *) wi->info;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
        /* Process function parameters now (but only once).  */
        if (d->param_decls)
          {
            gimple_bind_set_body (stmt,
                                  mx_register_decls (d->param_decls,
                                                     gimple_bind_body (stmt),
                                                     gimple_location (stmt)));
            d->param_decls = NULL_TREE;
          }

        gimple_bind_set_body (stmt,
                              mx_register_decls (gimple_bind_vars (stmt),
                                                 gimple_bind_body (stmt),
                                                 gimple_location (stmt)));
      }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Perform the object lifetime tracking mudflap transform on the given function
   tree.  The tree is mutated in place, with possibly copied subtree nodes.

   For every auto variable declared, if its address is ever taken
   within the function, then supply its lifetime to the mudflap
   runtime with the __mf_register and __mf_unregister calls.
*/

static void
mf_xform_decls (gimple_seq fnbody, tree fnparams)
{
  struct mf_xform_decls_data d;
  struct walk_stmt_info wi;
  struct pointer_set_t *pset = pointer_set_create ();

  d.param_decls = fnparams;
  memset (&wi, 0, sizeof (wi));
  wi.info = (void*) &d;
  wi.pset = pset;
  walk_gimple_seq (fnbody, mx_xfn_xform_decls, NULL, &wi);
  pointer_set_destroy (pset);
}
/* ------------------------------------------------------------------------ */
/* Externally visible mudflap functions.  */


/* Mark and return the given tree node to prevent further mudflap
   transforms.  */
static GTY ((param_is (union tree_node))) htab_t marked_trees = NULL;

tree
mf_mark (tree t)
{
  void **slot;

  if (marked_trees == NULL)
    marked_trees = htab_create_ggc (31, htab_hash_pointer, htab_eq_pointer,
                                    NULL);

  slot = htab_find_slot (marked_trees, t, INSERT);
  *slot = t;
  return t;
}

int
mf_marked_p (tree t)
{
  void *entry;

  if (marked_trees == NULL)
    return 0;

  entry = htab_find (marked_trees, t);
  return (entry != NULL);
}
/* Remember given node as a static of some kind: global data,
   function-scope static, or an anonymous constant.  Its assembler
   label is given.  */

/* A list of globals whose incomplete declarations we encountered.
   Instead of emitting the __mf_register call for them here, it's
   delayed until program finish time.  If they're still incomplete by
   then, warnings are emitted.  */

static GTY (()) VEC(tree,gc) *deferred_static_decls;

/* A list of statements for calling __mf_register() at startup time.  */
static GTY (()) tree enqueued_call_stmt_chain;

static void
mudflap_register_call (tree obj, tree object_size, tree varname)
{
  tree arg, call_stmt;

  arg = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (obj)), obj);
  arg = fold_convert (ptr_type_node, arg);

  call_stmt = build_call_expr (mf_register_fndecl, 4,
                               arg,
                               fold_convert (size_type_node, object_size),
                               /* __MF_TYPE_STATIC */
                               build_int_cst (integer_type_node, 4),
                               varname);

  append_to_statement_list (call_stmt, &enqueued_call_stmt_chain);
}
void
mudflap_enqueue_decl (tree obj)
{
  if (mf_marked_p (obj))
    return;

  /* We don't need to process variable decls that are internally
     generated extern.  If we did, we'd end up with warnings for them
     during mudflap_finish_file ().  That would confuse the user,
     since the text would refer to variables that don't show up in the
     user's source code.  */
  if (DECL_P (obj) && DECL_EXTERNAL (obj) && mf_artificial (obj))
    return;

  VEC_safe_push (tree, gc, deferred_static_decls, obj);
}


void
mudflap_enqueue_constant (tree obj)
{
  tree object_size, varname;

  if (mf_marked_p (obj))
    return;

  if (TREE_CODE (obj) == STRING_CST)
    object_size = size_int (TREE_STRING_LENGTH (obj));
  else
    object_size = size_in_bytes (TREE_TYPE (obj));

  if (TREE_CODE (obj) == STRING_CST)
    varname = mf_build_string ("string literal");
  else
    varname = mf_build_string ("constant");

  mudflap_register_call (obj, object_size, varname);
}
/* Emit any file-wide instrumentation.  */
void
mudflap_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  /* No need to continue when there were errors.  */
  if (seen_error ())
    return;

  /* Insert a call to __mf_init.  */
  {
    tree call2_stmt = build_call_expr (mf_init_fndecl, 0);
    append_to_statement_list (call2_stmt, &ctor_statements);
  }

  /* If appropriate, call __mf_set_options to pass along read-ignore mode.  */
  if (flag_mudflap_ignore_reads)
    {
      tree arg = mf_build_string ("-ignore-reads");
      tree call_stmt = build_call_expr (mf_set_options_fndecl, 1, arg);
      append_to_statement_list (call_stmt, &ctor_statements);
    }

  /* Process all enqueued object decls.  */
  if (deferred_static_decls)
    {
      size_t i;
      tree obj;
      FOR_EACH_VEC_ELT (tree, deferred_static_decls, i, obj)
        {
          gcc_assert (DECL_P (obj));

          if (mf_marked_p (obj))
            continue;

          /* Omit registration for static unaddressed objects.  NB:
             Perform registration for non-static objects regardless of
             TREE_USED or TREE_ADDRESSABLE, because they may be used
             from other compilation units.  */
          if (! TREE_PUBLIC (obj) && ! TREE_ADDRESSABLE (obj))
            continue;

          if (! COMPLETE_TYPE_P (TREE_TYPE (obj)))
            {
              warning (OPT_Wmudflap,
                       "mudflap cannot track unknown size extern %qE",
                       DECL_NAME (obj));
              continue;
            }

          mudflap_register_call (obj,
                                 size_in_bytes (TREE_TYPE (obj)),
                                 mf_varname_tree (obj));
        }

      VEC_truncate (tree, deferred_static_decls, 0);
    }

  /* Append all the enqueued registration calls.  */
  if (enqueued_call_stmt_chain)
    {
      append_to_statement_list (enqueued_call_stmt_chain, &ctor_statements);
      enqueued_call_stmt_chain = NULL_TREE;
    }

  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY-1);
}
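
/* An editorial sketch (not part of the original file) of the synthetic
   static constructor assembled above, shown as C source for a translation
   unit with one tracked global G and with flag_mudflap_ignore_reads set;
   the constructor name is hypothetical, since cgraph_build_static_cdtor
   chooses it:

     static void __mf_ctor_I (void)
     {
       __mf_init ();
       __mf_set_options ("-ignore-reads");
       __mf_register (&G, sizeof (G), 4, "file:line G");
     }

   where 4 is the __MF_TYPE_STATIC code used by mudflap_register_call.  */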
static bool
gate_mudflap (void)
{
  return flag_mudflap != 0;
}

struct gimple_opt_pass pass_mudflap_1 =
{
 {
  GIMPLE_PASS,
  "mudflap1",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_decls,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_gimple_any,                      /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_mudflap_2 =
{
 {
  GIMPLE_PASS,
  "mudflap2",                           /* name */
  gate_mudflap,                         /* gate */
  execute_mudflap_function_ops,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow | TODO_verify_stmts
  | TODO_update_ssa                     /* todo_flags_finish */
 }
};

#include "gt-tree-mudflap.h"