1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
36 #include "coretypes.h"
41 #include "tree-pass.h"
42 #include "tree-streamer.h"
44 #include "diagnostic.h"
48 #include "gimple-iterator.h"
49 #include "gimple-walk.h"
51 #include "tree-ssa-loop-niter.h"
52 #include "langhooks.h"
53 #include "ipa-utils.h"
54 #include "gimple-pretty-print.h"
56 #include "tree-scalar-evolution.h"
60 #include "alloc-pool.h"
61 #include "symbol-summary.h"
63 #include "ipa-fnsummary.h"
65 /* Lattice values for const and pure functions. Everything starts out
66 being const, then may drop to pure and then neither depending on
68 enum pure_const_state_e
/* Names of the pure/const lattice values, indexed by pure_const_state_e.  */
static const char *pure_const_names[3] = {"const", "pure", "neither"};

/* Names of the malloc lattice values, indexed by malloc_state_e.  */
static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
86 /* Holder for the const_state. There is one of these per function
91 funct_state_d (): pure_const_state (IPA_NEITHER
),
92 state_previously_known (IPA_NEITHER
), looping_previously_known (true),
93 looping (true), can_throw (true), can_free (true),
94 malloc_state (STATE_MALLOC_BOTTOM
) {}
96 funct_state_d (const funct_state_d
&s
): pure_const_state (s
.pure_const_state
),
97 state_previously_known (s
.state_previously_known
),
98 looping_previously_known (s
.looping_previously_known
),
99 looping (s
.looping
), can_throw (s
.can_throw
), can_free (s
.can_free
),
100 malloc_state (s
.malloc_state
) {}
103 enum pure_const_state_e pure_const_state
;
104 /* What user set here; we can be always sure about this. */
105 enum pure_const_state_e state_previously_known
;
106 bool looping_previously_known
;
108 /* True if the function could possibly infinite loop. There are a
109 lot of ways that this could be determined. We are pretty
110 conservative here. While it is possible to cse pure and const
111 calls, it is not legal to have dce get rid of the call if there
112 is a possibility that the call could infinite loop since this is
113 a behavioral change. */
118 /* If function can call free, munmap or otherwise make previously
119 non-trapping memory accesses trapping. */
122 enum malloc_state_e malloc_state
;
/* Pointer alias used throughout the local analysis.  */
typedef class funct_state_d * funct_state;
127 /* The storage of the funct_state is abstracted because there is the
128 possibility that it may be desirable to move this to the cgraph
131 class funct_state_summary_t
:
132 public fast_function_summary
<funct_state_d
*, va_heap
>
135 funct_state_summary_t (symbol_table
*symtab
):
136 fast_function_summary
<funct_state_d
*, va_heap
> (symtab
) {}
138 virtual void insert (cgraph_node
*, funct_state_d
*state
);
139 virtual void duplicate (cgraph_node
*src_node
, cgraph_node
*dst_node
,
140 funct_state_d
*src_data
,
141 funct_state_d
*dst_data
);
144 static funct_state_summary_t
*funct_state_summaries
= NULL
;
146 static bool gate_pure_const (void);
150 const pass_data pass_data_ipa_pure_const
=
153 "pure-const", /* name */
154 OPTGROUP_NONE
, /* optinfo_flags */
155 TV_IPA_PURE_CONST
, /* tv_id */
156 0, /* properties_required */
157 0, /* properties_provided */
158 0, /* properties_destroyed */
159 0, /* todo_flags_start */
160 0, /* todo_flags_finish */
163 class pass_ipa_pure_const
: public ipa_opt_pass_d
166 pass_ipa_pure_const(gcc::context
*ctxt
);
168 /* opt_pass methods: */
169 bool gate (function
*) { return gate_pure_const (); }
170 unsigned int execute (function
*fun
);
172 void register_hooks (void);
176 }; // class pass_ipa_pure_const
180 /* Try to guess if function body will always be visible to compiler
181 when compiling the call and whether compiler will be able
182 to propagate the information by itself. */
185 function_always_visible_to_compiler_p (tree decl
)
187 return (!TREE_PUBLIC (decl
) || DECL_DECLARED_INLINE_P (decl
)
188 || DECL_COMDAT (decl
));
191 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
192 is true if the function is known to be finite. The diagnostic is
193 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
194 OPTION, this function may initialize it and it is always returned
197 static hash_set
<tree
> *
198 suggest_attribute (int option
, tree decl
, bool known_finite
,
199 hash_set
<tree
> *warned_about
,
200 const char * attrib_name
)
202 if (!option_enabled (option
, lang_hooks
.option_lang_mask (), &global_options
))
204 if (TREE_THIS_VOLATILE (decl
)
205 || (known_finite
&& function_always_visible_to_compiler_p (decl
)))
209 warned_about
= new hash_set
<tree
>;
210 if (warned_about
->contains (decl
))
212 warned_about
->add (decl
);
213 warning_at (DECL_SOURCE_LOCATION (decl
),
216 ? G_("function might be candidate for attribute %qs")
217 : G_("function might be candidate for attribute %qs"
218 " if it is known to return normally"), attrib_name
);
222 /* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
223 is true if the function is known to be finite. */
226 warn_function_pure (tree decl
, bool known_finite
)
228 /* Declaring a void function pure makes no sense and is diagnosed
229 by -Wattributes because calling it would have no effect. */
230 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
233 static hash_set
<tree
> *warned_about
;
235 = suggest_attribute (OPT_Wsuggest_attribute_pure
, decl
,
236 known_finite
, warned_about
, "pure");
239 /* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
240 is true if the function is known to be finite. */
243 warn_function_const (tree decl
, bool known_finite
)
245 /* Declaring a void function const makes no sense is diagnosed
246 by -Wattributes because calling it would have no effect. */
247 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
250 static hash_set
<tree
> *warned_about
;
252 = suggest_attribute (OPT_Wsuggest_attribute_const
, decl
,
253 known_finite
, warned_about
, "const");
256 /* Emit suggestion about __attribute__((malloc)) for DECL. */
259 warn_function_malloc (tree decl
)
261 static hash_set
<tree
> *warned_about
;
263 = suggest_attribute (OPT_Wsuggest_attribute_malloc
, decl
,
264 true, warned_about
, "malloc");
267 /* Emit suggestion about __attribute__((noreturn)) for DECL. */
270 warn_function_noreturn (tree decl
)
272 tree original_decl
= decl
;
274 static hash_set
<tree
> *warned_about
;
275 if (!lang_hooks
.missing_noreturn_ok_p (decl
)
276 && targetm
.warn_func_return (decl
))
278 = suggest_attribute (OPT_Wsuggest_attribute_noreturn
, original_decl
,
279 true, warned_about
, "noreturn");
283 warn_function_cold (tree decl
)
285 tree original_decl
= decl
;
287 static hash_set
<tree
> *warned_about
;
289 = suggest_attribute (OPT_Wsuggest_attribute_cold
, original_decl
,
290 true, warned_about
, "cold");
293 /* Check to see if the use (or definition when CHECKING_WRITE is true)
294 variable T is legal in a function that is either pure or const. */
297 check_decl (funct_state local
,
298 tree t
, bool checking_write
, bool ipa
)
300 /* Do not want to do anything with volatile except mark any
301 function that uses one to be not const or pure. */
302 if (TREE_THIS_VOLATILE (t
))
304 local
->pure_const_state
= IPA_NEITHER
;
306 fprintf (dump_file
, " Volatile operand is not const/pure\n");
310 /* Do not care about a local automatic that is not static. */
311 if (!TREE_STATIC (t
) && !DECL_EXTERNAL (t
))
314 /* If the variable has the "used" attribute, treat it as if it had a
315 been touched by the devil. */
316 if (DECL_PRESERVE_P (t
))
318 local
->pure_const_state
= IPA_NEITHER
;
320 fprintf (dump_file
, " Used static/global variable is not const/pure\n");
324 /* In IPA mode we are not interested in checking actual loads and stores;
325 they will be processed at propagation time using ipa_ref. */
329 /* Since we have dealt with the locals and params cases above, if we
330 are CHECKING_WRITE, this cannot be a pure or constant
334 local
->pure_const_state
= IPA_NEITHER
;
336 fprintf (dump_file
, " static/global memory write is not const/pure\n");
340 if (DECL_EXTERNAL (t
) || TREE_PUBLIC (t
))
342 /* Readonly reads are safe. */
343 if (TREE_READONLY (t
))
344 return; /* Read of a constant, do not change the function state. */
348 fprintf (dump_file
, " global memory read is not const\n");
349 /* Just a regular read. */
350 if (local
->pure_const_state
== IPA_CONST
)
351 local
->pure_const_state
= IPA_PURE
;
356 /* Compilation level statics can be read if they are readonly
358 if (TREE_READONLY (t
))
362 fprintf (dump_file
, " static memory read is not const\n");
363 /* Just a regular read. */
364 if (local
->pure_const_state
== IPA_CONST
)
365 local
->pure_const_state
= IPA_PURE
;
370 /* Check to see if the use (or definition when CHECKING_WRITE is true)
371 variable T is legal in a function that is either pure or const. */
374 check_op (funct_state local
, tree t
, bool checking_write
)
376 t
= get_base_address (t
);
377 if (t
&& TREE_THIS_VOLATILE (t
))
379 local
->pure_const_state
= IPA_NEITHER
;
381 fprintf (dump_file
, " Volatile indirect ref is not const/pure\n");
384 else if (refs_local_or_readonly_memory_p (t
))
387 fprintf (dump_file
, " Indirect ref to local or readonly "
391 else if (checking_write
)
393 local
->pure_const_state
= IPA_NEITHER
;
395 fprintf (dump_file
, " Indirect ref write is not const/pure\n");
401 fprintf (dump_file
, " Indirect ref read is not const\n");
402 if (local
->pure_const_state
== IPA_CONST
)
403 local
->pure_const_state
= IPA_PURE
;
407 /* compute state based on ECF FLAGS and store to STATE and LOOPING. */
410 state_from_flags (enum pure_const_state_e
*state
, bool *looping
,
411 int flags
, bool cannot_lead_to_return
)
414 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
417 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
418 fprintf (dump_file
, " looping\n");
420 if (flags
& ECF_CONST
)
423 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
424 fprintf (dump_file
, " const\n");
426 else if (flags
& ECF_PURE
)
429 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
430 fprintf (dump_file
, " pure\n");
432 else if (cannot_lead_to_return
)
436 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
437 fprintf (dump_file
, " ignoring side effects->pure looping\n");
441 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
442 fprintf (dump_file
, " neither\n");
443 *state
= IPA_NEITHER
;
448 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
449 into STATE and LOOPING better of the two variants.
450 Be sure to merge looping correctly. IPA_NEITHER functions
451 have looping 0 even if they don't have to return. */
454 better_state (enum pure_const_state_e
*state
, bool *looping
,
455 enum pure_const_state_e state2
, bool looping2
)
459 if (*state
== IPA_NEITHER
)
462 *looping
= MIN (*looping
, looping2
);
465 else if (state2
!= IPA_NEITHER
)
466 *looping
= MIN (*looping
, looping2
);
469 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
470 into STATE and LOOPING worse of the two variants.
471 N is the actual node called. */
474 worse_state (enum pure_const_state_e
*state
, bool *looping
,
475 enum pure_const_state_e state2
, bool looping2
,
476 struct symtab_node
*from
,
477 struct symtab_node
*to
)
479 /* Consider function:
486 During early optimization we will turn this into:
493 Now if this function will be detected as CONST however when interposed it
494 may end up being just pure. We always must assume the worst scenario here.
496 if (*state
== IPA_CONST
&& state2
== IPA_CONST
497 && to
&& !TREE_READONLY (to
->decl
) && !to
->binds_to_current_def_p (from
))
499 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
500 fprintf (dump_file
, "Dropping state to PURE because call to %s may not "
501 "bind to current def.\n", to
->dump_name ());
504 *state
= MAX (*state
, state2
);
505 *looping
= MAX (*looping
, looping2
);
508 /* Recognize special cases of builtins that are by themselves not pure or const
509 but function using them is. */
511 special_builtin_state (enum pure_const_state_e
*state
, bool *looping
,
514 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
515 switch (DECL_FUNCTION_CODE (callee
))
517 case BUILT_IN_RETURN
:
518 case BUILT_IN_UNREACHABLE
:
519 CASE_BUILT_IN_ALLOCA
:
520 case BUILT_IN_STACK_SAVE
:
521 case BUILT_IN_STACK_RESTORE
:
522 case BUILT_IN_EH_POINTER
:
523 case BUILT_IN_EH_FILTER
:
524 case BUILT_IN_UNWIND_RESUME
:
525 case BUILT_IN_CXA_END_CLEANUP
:
526 case BUILT_IN_EH_COPY_VALUES
:
527 case BUILT_IN_FRAME_ADDRESS
:
528 case BUILT_IN_APPLY_ARGS
:
529 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT
:
530 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
:
534 case BUILT_IN_PREFETCH
:
544 /* Check the parameters of a function call to CALL_EXPR to see if
545 there are any references in the parameters that are not allowed for
546 pure or const functions. Also check to see if this is either an
547 indirect call, a call outside the compilation unit, or has special
548 attributes that may also effect the purity. The CALL_EXPR node for
549 the entire call expression. */
552 check_call (funct_state local
, gcall
*call
, bool ipa
)
554 int flags
= gimple_call_flags (call
);
555 tree callee_t
= gimple_call_fndecl (call
);
556 bool possibly_throws
= stmt_could_throw_p (cfun
, call
);
557 bool possibly_throws_externally
= (possibly_throws
558 && stmt_can_throw_external (cfun
, call
));
563 for (i
= 0; i
< gimple_num_ops (call
); i
++)
564 if (gimple_op (call
, i
)
565 && tree_could_throw_p (gimple_op (call
, i
)))
567 if (possibly_throws
&& cfun
->can_throw_non_call_exceptions
)
570 fprintf (dump_file
, " operand can throw; looping\n");
571 local
->looping
= true;
573 if (possibly_throws_externally
)
576 fprintf (dump_file
, " operand can throw externally\n");
577 local
->can_throw
= true;
582 /* The const and pure flags are set by a variety of places in the
583 compiler (including here). If someone has already set the flags
584 for the callee, (such as for some of the builtins) we will use
585 them, otherwise we will compute our own information.
587 Const and pure functions have less clobber effects than other
588 functions so we process these first. Otherwise if it is a call
589 outside the compilation unit or an indirect call we punt. This
590 leaves local calls which will be processed by following the call
594 enum pure_const_state_e call_state
;
597 if (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
)
598 && !nonfreeing_call_p (call
))
599 local
->can_free
= true;
601 if (special_builtin_state (&call_state
, &call_looping
, callee_t
))
603 worse_state (&local
->pure_const_state
, &local
->looping
,
604 call_state
, call_looping
,
608 /* When bad things happen to bad functions, they cannot be const
610 if (setjmp_call_p (callee_t
))
613 fprintf (dump_file
, " setjmp is not const/pure\n");
614 local
->looping
= true;
615 local
->pure_const_state
= IPA_NEITHER
;
618 if (DECL_BUILT_IN_CLASS (callee_t
) == BUILT_IN_NORMAL
)
619 switch (DECL_FUNCTION_CODE (callee_t
))
621 case BUILT_IN_LONGJMP
:
622 case BUILT_IN_NONLOCAL_GOTO
:
625 " longjmp and nonlocal goto is not const/pure\n");
626 local
->pure_const_state
= IPA_NEITHER
;
627 local
->looping
= true;
633 else if (gimple_call_internal_p (call
) && !nonfreeing_call_p (call
))
634 local
->can_free
= true;
636 /* When not in IPA mode, we can still handle self recursion. */
638 && recursive_call_p (current_function_decl
, callee_t
))
641 fprintf (dump_file
, " Recursive call can loop.\n");
642 local
->looping
= true;
644 /* Either callee is unknown or we are doing local analysis.
645 Look to see if there are any bits available for the callee (such as by
646 declaration or because it is builtin) and process solely on the basis of
647 those bits. Handle internal calls always, those calls don't have
648 corresponding cgraph edges and thus aren't processed during
650 else if (!ipa
|| gimple_call_internal_p (call
))
652 enum pure_const_state_e call_state
;
654 if (possibly_throws
&& cfun
->can_throw_non_call_exceptions
)
657 fprintf (dump_file
, " can throw; looping\n");
658 local
->looping
= true;
660 if (possibly_throws_externally
)
664 fprintf (dump_file
, " can throw externally to lp %i\n",
665 lookup_stmt_eh_lp (call
));
667 fprintf (dump_file
, " callee:%s\n",
668 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t
)));
670 local
->can_throw
= true;
672 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
673 fprintf (dump_file
, " checking flags for call:");
674 state_from_flags (&call_state
, &call_looping
, flags
,
675 ((flags
& (ECF_NORETURN
| ECF_NOTHROW
))
676 == (ECF_NORETURN
| ECF_NOTHROW
))
677 || (!flag_exceptions
&& (flags
& ECF_NORETURN
)));
678 worse_state (&local
->pure_const_state
, &local
->looping
,
679 call_state
, call_looping
, NULL
, NULL
);
681 /* Direct functions calls are handled by IPA propagation. */
/* Wrapper around check_decl for loads in local mode.  */
687 check_load (gimple
*, tree op
, tree
, void *data
)
690 check_decl ((funct_state
)data
, op
, false, false);
692 check_op ((funct_state
)data
, op
, false);
/* Wrapper around check_decl for stores in local mode.  */
699 check_store (gimple
*, tree op
, tree
, void *data
)
702 check_decl ((funct_state
)data
, op
, true, false);
704 check_op ((funct_state
)data
, op
, true);
708 /* Wrapper around check_decl for loads in ipa mode. */
711 check_ipa_load (gimple
*, tree op
, tree
, void *data
)
714 check_decl ((funct_state
)data
, op
, false, true);
716 check_op ((funct_state
)data
, op
, false);
720 /* Wrapper around check_decl for stores in ipa mode. */
723 check_ipa_store (gimple
*, tree op
, tree
, void *data
)
726 check_decl ((funct_state
)data
, op
, true, true);
728 check_op ((funct_state
)data
, op
, true);
732 /* Look into pointer pointed to by GSIP and figure out what interesting side
735 check_stmt (gimple_stmt_iterator
*gsip
, funct_state local
, bool ipa
)
737 gimple
*stmt
= gsi_stmt (*gsip
);
739 if (is_gimple_debug (stmt
))
742 /* Do consider clobber as side effects before IPA, so we rather inline
743 C++ destructors and keep clobber semantics than eliminate them.
745 TODO: We may get smarter during early optimizations on these and let
746 functions containing only clobbers to be optimized more. This is a common
747 case of C++ destructors. */
749 if ((ipa
|| cfun
->after_inlining
) && gimple_clobber_p (stmt
))
754 fprintf (dump_file
, " scanning: ");
755 print_gimple_stmt (dump_file
, stmt
, 0);
758 if (gimple_has_volatile_ops (stmt
)
759 && !gimple_clobber_p (stmt
))
761 local
->pure_const_state
= IPA_NEITHER
;
763 fprintf (dump_file
, " Volatile stmt is not const/pure\n");
766 /* Look for loads and stores. */
767 walk_stmt_load_store_ops (stmt
, local
,
768 ipa
? check_ipa_load
: check_load
,
769 ipa
? check_ipa_store
: check_store
);
771 if (gimple_code (stmt
) != GIMPLE_CALL
772 && stmt_could_throw_p (cfun
, stmt
))
774 if (cfun
->can_throw_non_call_exceptions
)
777 fprintf (dump_file
, " can throw; looping\n");
778 local
->looping
= true;
780 if (stmt_can_throw_external (cfun
, stmt
))
783 fprintf (dump_file
, " can throw externally\n");
784 local
->can_throw
= true;
788 fprintf (dump_file
, " can throw\n");
790 switch (gimple_code (stmt
))
793 check_call (local
, as_a
<gcall
*> (stmt
), ipa
);
796 if (DECL_NONLOCAL (gimple_label_label (as_a
<glabel
*> (stmt
))))
797 /* Target of long jump. */
800 fprintf (dump_file
, " nonlocal label is not const/pure\n");
801 local
->pure_const_state
= IPA_NEITHER
;
805 if (gimple_asm_clobbers_memory_p (as_a
<gasm
*> (stmt
)))
808 fprintf (dump_file
, " memory asm clobber is not const/pure\n");
809 /* Abandon all hope, ye who enter here. */
810 local
->pure_const_state
= IPA_NEITHER
;
811 local
->can_free
= true;
813 if (gimple_asm_volatile_p (as_a
<gasm
*> (stmt
)))
816 fprintf (dump_file
, " volatile is not const/pure\n");
817 /* Abandon all hope, ye who enter here. */
818 local
->pure_const_state
= IPA_NEITHER
;
819 local
->looping
= true;
820 local
->can_free
= true;
828 /* Check that RETVAL is used only in STMT and in comparisons against 0.
829 RETVAL is return value of the function and STMT is return stmt. */
832 check_retval_uses (tree retval
, gimple
*stmt
)
834 imm_use_iterator use_iter
;
837 FOR_EACH_IMM_USE_STMT (use_stmt
, use_iter
, retval
)
838 if (gcond
*cond
= dyn_cast
<gcond
*> (use_stmt
))
840 tree op2
= gimple_cond_rhs (cond
);
841 if (!integer_zerop (op2
))
842 RETURN_FROM_IMM_USE_STMT (use_iter
, false);
844 else if (gassign
*ga
= dyn_cast
<gassign
*> (use_stmt
))
846 enum tree_code code
= gimple_assign_rhs_code (ga
);
847 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
848 RETURN_FROM_IMM_USE_STMT (use_iter
, false);
849 if (!integer_zerop (gimple_assign_rhs2 (ga
)))
850 RETURN_FROM_IMM_USE_STMT (use_iter
, false);
852 else if (is_gimple_debug (use_stmt
))
854 else if (use_stmt
!= stmt
)
855 RETURN_FROM_IMM_USE_STMT (use_iter
, false);
860 /* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
861 attribute. Currently this function does a very conservative analysis.
862 FUN is considered to be a candidate if
863 1) It returns a value of pointer type.
864 2) SSA_NAME_DEF_STMT (return_value) is either a function call or
865 a phi, and element of phi is either NULL or
866 SSA_NAME_DEF_STMT(element) is function call.
867 3) The return-value has immediate uses only within comparisons (gcond or gassign)
868 and return_stmt (and likewise a phi arg has immediate use only within comparison
/* Dump REASON for rejecting the malloc candidacy of NODE (in scope at
   the expansion site) and bail out with false.  */
#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
	     (node->dump_name ()), (reason)); \
  return false;  \
}
880 malloc_candidate_p_1 (function
*fun
, tree retval
, gimple
*ret_stmt
, bool ipa
,
883 cgraph_node
*node
= cgraph_node::get_create (fun
->decl
);
884 if (!bitmap_set_bit (visited
, SSA_NAME_VERSION (retval
)))
887 if (!check_retval_uses (retval
, ret_stmt
))
888 DUMP_AND_RETURN("Return value has uses outside return stmt"
889 " and comparisons against 0.")
891 gimple
*def
= SSA_NAME_DEF_STMT (retval
);
893 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (def
))
895 tree callee_decl
= gimple_call_fndecl (call_stmt
);
899 if (!ipa
&& !DECL_IS_MALLOC (callee_decl
))
900 DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
903 cgraph_edge
*cs
= node
->get_edge (call_stmt
);
906 ipa_call_summary
*es
= ipa_call_summaries
->get_create (cs
);
907 es
->is_return_callee_uncaptured
= true;
911 else if (gphi
*phi
= dyn_cast
<gphi
*> (def
))
913 bool all_args_zero
= true;
914 for (unsigned i
= 0; i
< gimple_phi_num_args (phi
); ++i
)
916 tree arg
= gimple_phi_arg_def (phi
, i
);
917 if (integer_zerop (arg
))
920 all_args_zero
= false;
921 if (TREE_CODE (arg
) != SSA_NAME
)
922 DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
923 if (!check_retval_uses (arg
, phi
))
924 DUMP_AND_RETURN ("phi arg has uses outside phi"
925 " and comparisons against 0.")
927 gimple
*arg_def
= SSA_NAME_DEF_STMT (arg
);
928 if (is_a
<gphi
*> (arg_def
))
930 if (!malloc_candidate_p_1 (fun
, arg
, phi
, ipa
, visited
))
931 DUMP_AND_RETURN ("nested phi fail")
935 gcall
*call_stmt
= dyn_cast
<gcall
*> (arg_def
);
937 DUMP_AND_RETURN ("phi arg is a not a call_stmt.")
939 tree callee_decl
= gimple_call_fndecl (call_stmt
);
942 if (!ipa
&& !DECL_IS_MALLOC (callee_decl
))
943 DUMP_AND_RETURN("callee_decl does not have malloc attribute"
944 " for non-ipa mode.")
946 cgraph_edge
*cs
= node
->get_edge (call_stmt
);
949 ipa_call_summary
*es
= ipa_call_summaries
->get_create (cs
);
950 es
->is_return_callee_uncaptured
= true;
955 DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
959 DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
965 malloc_candidate_p (function
*fun
, bool ipa
)
967 basic_block exit_block
= EXIT_BLOCK_PTR_FOR_FN (fun
);
970 cgraph_node
*node
= cgraph_node::get_create (fun
->decl
);
972 if (EDGE_COUNT (exit_block
->preds
) == 0
973 || !flag_delete_null_pointer_checks
)
977 FOR_EACH_EDGE (e
, ei
, exit_block
->preds
)
979 gimple_stmt_iterator gsi
= gsi_last_bb (e
->src
);
980 greturn
*ret_stmt
= dyn_cast
<greturn
*> (gsi_stmt (gsi
));
985 tree retval
= gimple_return_retval (ret_stmt
);
987 DUMP_AND_RETURN("No return value.")
989 if (TREE_CODE (retval
) != SSA_NAME
990 || TREE_CODE (TREE_TYPE (retval
)) != POINTER_TYPE
)
991 DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
993 if (!malloc_candidate_p_1 (fun
, retval
, ret_stmt
, ipa
, visited
))
997 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
998 fprintf (dump_file
, "\nFound %s to be candidate for malloc attribute\n",
999 IDENTIFIER_POINTER (DECL_NAME (fun
->decl
)));
1003 #undef DUMP_AND_RETURN
1005 /* This is the main routine for finding the reference patterns for
1006 global variables within a function FN. */
1009 analyze_function (struct cgraph_node
*fn
, bool ipa
)
1011 tree decl
= fn
->decl
;
1013 basic_block this_block
;
1015 l
= XCNEW (class funct_state_d
);
1016 l
->pure_const_state
= IPA_CONST
;
1017 l
->state_previously_known
= IPA_NEITHER
;
1018 l
->looping_previously_known
= true;
1020 l
->can_throw
= false;
1021 l
->can_free
= false;
1022 state_from_flags (&l
->state_previously_known
, &l
->looping_previously_known
,
1023 flags_from_decl_or_type (fn
->decl
),
1024 fn
->cannot_return_p ());
1026 if (fn
->thunk
.thunk_p
|| fn
->alias
)
1028 /* Thunk gets propagated through, so nothing interesting happens. */
1030 if (fn
->thunk
.thunk_p
&& fn
->thunk
.virtual_offset_p
)
1031 l
->pure_const_state
= IPA_NEITHER
;
1037 fprintf (dump_file
, "\n\n local analysis of %s\n ",
1041 push_cfun (DECL_STRUCT_FUNCTION (decl
));
1043 FOR_EACH_BB_FN (this_block
, cfun
)
1045 gimple_stmt_iterator gsi
;
1046 struct walk_stmt_info wi
;
1048 memset (&wi
, 0, sizeof (wi
));
1049 for (gsi
= gsi_start_bb (this_block
);
1053 check_stmt (&gsi
, l
, ipa
);
1054 if (l
->pure_const_state
== IPA_NEITHER
1063 if (l
->pure_const_state
!= IPA_NEITHER
)
1065 /* Const functions cannot have back edges (an
1066 indication of possible infinite loop side
1068 if (mark_dfs_back_edges ())
1070 /* Preheaders are needed for SCEV to work.
1071 Simple latches and recorded exits improve chances that loop will
1072 proved to be finite in testcases such as in loop-15.c
1074 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1075 | LOOPS_HAVE_SIMPLE_LATCHES
1076 | LOOPS_HAVE_RECORDED_EXITS
);
1077 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1078 flow_loops_dump (dump_file
, NULL
, 0);
1079 if (mark_irreducible_loops ())
1082 fprintf (dump_file
, " has irreducible loops\n");
1089 FOR_EACH_LOOP (loop
, 0)
1090 if (!finite_loop_p (loop
))
1093 fprintf (dump_file
, " cannot prove finiteness of "
1094 "loop %i\n", loop
->num
);
1100 loop_optimizer_finalize ();
1104 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1105 fprintf (dump_file
, " checking previously known:");
1107 better_state (&l
->pure_const_state
, &l
->looping
,
1108 l
->state_previously_known
,
1109 l
->looping_previously_known
);
1110 if (TREE_NOTHROW (decl
))
1111 l
->can_throw
= false;
1113 l
->malloc_state
= STATE_MALLOC_BOTTOM
;
1114 if (DECL_IS_MALLOC (decl
))
1115 l
->malloc_state
= STATE_MALLOC
;
1116 else if (ipa
&& malloc_candidate_p (DECL_STRUCT_FUNCTION (decl
), true))
1117 l
->malloc_state
= STATE_MALLOC_TOP
;
1118 else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl
), false))
1119 l
->malloc_state
= STATE_MALLOC
;
1125 fprintf (dump_file
, "Function is locally looping.\n");
1127 fprintf (dump_file
, "Function is locally throwing.\n");
1128 if (l
->pure_const_state
== IPA_CONST
)
1129 fprintf (dump_file
, "Function is locally const.\n");
1130 if (l
->pure_const_state
== IPA_PURE
)
1131 fprintf (dump_file
, "Function is locally pure.\n");
1133 fprintf (dump_file
, "Function can locally free.\n");
1134 if (l
->malloc_state
== STATE_MALLOC
)
1135 fprintf (dump_file
, "Function is locally malloc.\n");
1141 funct_state_summary_t::insert (cgraph_node
*node
, funct_state_d
*state
)
1143 /* There are some shared nodes, in particular the initializers on
1144 static declarations. We do not need to scan them more than once
1145 since all we would be interested in are the addressof
1147 if (opt_for_fn (node
->decl
, flag_ipa_pure_const
))
1149 funct_state_d
*a
= analyze_function (node
, true);
1150 new (state
) funct_state_d (*a
);
1155 /* Called when new clone is inserted to callgraph late. */
1158 funct_state_summary_t::duplicate (cgraph_node
*, cgraph_node
*dst
,
1159 funct_state_d
*src_data
,
1160 funct_state_d
*dst_data
)
1162 new (dst_data
) funct_state_d (*src_data
);
1163 if (dst_data
->malloc_state
== STATE_MALLOC
1164 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst
->decl
))))
1165 dst_data
->malloc_state
= STATE_MALLOC_BOTTOM
;
1170 pass_ipa_pure_const::
1171 register_hooks (void)
1178 funct_state_summaries
= new funct_state_summary_t (symtab
);
1182 /* Analyze each function in the cgraph to see if it is locally PURE or
1186 pure_const_generate_summary (void)
1188 struct cgraph_node
*node
;
1190 pass_ipa_pure_const
*pass
= static_cast <pass_ipa_pure_const
*> (current_pass
);
1191 pass
->register_hooks ();
1193 /* Process all of the functions.
1195 We process AVAIL_INTERPOSABLE functions. We cannot use the results
1196 by default, but the info can be used at LTO with -fwhole-program or
1197 when function got cloned and the clone is AVAILABLE. */
1199 FOR_EACH_DEFINED_FUNCTION (node
)
1200 if (opt_for_fn (node
->decl
, flag_ipa_pure_const
))
1202 funct_state_d
*a
= analyze_function (node
, true);
1203 new (funct_state_summaries
->get_create (node
)) funct_state_d (*a
);
1209 /* Serialize the ipa info for lto. */
1212 pure_const_write_summary (void)
1214 struct cgraph_node
*node
;
1215 struct lto_simple_output_block
*ob
1216 = lto_create_simple_output_block (LTO_section_ipa_pure_const
);
1217 unsigned int count
= 0;
1218 lto_symtab_encoder_iterator lsei
;
1219 lto_symtab_encoder_t encoder
;
1221 encoder
= lto_get_out_decl_state ()->symtab_node_encoder
;
1223 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
1224 lsei_next_function_in_partition (&lsei
))
1226 node
= lsei_cgraph_node (lsei
);
1227 if (node
->definition
&& funct_state_summaries
->exists (node
))
1231 streamer_write_uhwi_stream (ob
->main_stream
, count
);
1233 /* Process all of the functions. */
1234 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
1235 lsei_next_function_in_partition (&lsei
))
1237 node
= lsei_cgraph_node (lsei
);
1238 funct_state_d
*fs
= funct_state_summaries
->get (node
);
1239 if (node
->definition
&& fs
!= NULL
)
1241 struct bitpack_d bp
;
1243 lto_symtab_encoder_t encoder
;
1245 encoder
= ob
->decl_state
->symtab_node_encoder
;
1246 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
1247 streamer_write_uhwi_stream (ob
->main_stream
, node_ref
);
1249 /* Note that flags will need to be read in the opposite
1250 order as we are pushing the bitflags into FLAGS. */
1251 bp
= bitpack_create (ob
->main_stream
);
1252 bp_pack_value (&bp
, fs
->pure_const_state
, 2);
1253 bp_pack_value (&bp
, fs
->state_previously_known
, 2);
1254 bp_pack_value (&bp
, fs
->looping_previously_known
, 1);
1255 bp_pack_value (&bp
, fs
->looping
, 1);
1256 bp_pack_value (&bp
, fs
->can_throw
, 1);
1257 bp_pack_value (&bp
, fs
->can_free
, 1);
1258 bp_pack_value (&bp
, fs
->malloc_state
, 2);
1259 streamer_write_bitpack (&bp
);
1263 lto_destroy_simple_output_block (ob
);
1267 /* Deserialize the ipa info for lto. */
1270 pure_const_read_summary (void)
1272 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
1273 struct lto_file_decl_data
*file_data
;
1276 pass_ipa_pure_const
*pass
= static_cast <pass_ipa_pure_const
*> (current_pass
);
1277 pass
->register_hooks ();
1279 while ((file_data
= file_data_vec
[j
++]))
1283 class lto_input_block
*ib
1284 = lto_create_simple_input_block (file_data
,
1285 LTO_section_ipa_pure_const
,
1290 unsigned int count
= streamer_read_uhwi (ib
);
1292 for (i
= 0; i
< count
; i
++)
1295 struct cgraph_node
*node
;
1296 struct bitpack_d bp
;
1298 lto_symtab_encoder_t encoder
;
1300 index
= streamer_read_uhwi (ib
);
1301 encoder
= file_data
->symtab_node_encoder
;
1302 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
1305 fs
= funct_state_summaries
->get_create (node
);
1306 /* Note that the flags must be read in the opposite
1307 order in which they were written (the bitflags were
1308 pushed into FLAGS). */
1309 bp
= streamer_read_bitpack (ib
);
1310 fs
->pure_const_state
1311 = (enum pure_const_state_e
) bp_unpack_value (&bp
, 2);
1312 fs
->state_previously_known
1313 = (enum pure_const_state_e
) bp_unpack_value (&bp
, 2);
1314 fs
->looping_previously_known
= bp_unpack_value (&bp
, 1);
1315 fs
->looping
= bp_unpack_value (&bp
, 1);
1316 fs
->can_throw
= bp_unpack_value (&bp
, 1);
1317 fs
->can_free
= bp_unpack_value (&bp
, 1);
1319 = (enum malloc_state_e
) bp_unpack_value (&bp
, 2);
1323 int flags
= flags_from_decl_or_type (node
->decl
);
1324 fprintf (dump_file
, "Read info for %s ", node
->dump_name ());
1325 if (flags
& ECF_CONST
)
1326 fprintf (dump_file
, " const");
1327 if (flags
& ECF_PURE
)
1328 fprintf (dump_file
, " pure");
1329 if (flags
& ECF_NOTHROW
)
1330 fprintf (dump_file
, " nothrow");
1331 fprintf (dump_file
, "\n pure const state: %s\n",
1332 pure_const_names
[fs
->pure_const_state
]);
1333 fprintf (dump_file
, " previously known state: %s\n",
1334 pure_const_names
[fs
->state_previously_known
]);
1336 fprintf (dump_file
," function is locally looping\n");
1337 if (fs
->looping_previously_known
)
1338 fprintf (dump_file
," function is previously known looping\n");
1340 fprintf (dump_file
," function is locally throwing\n");
1342 fprintf (dump_file
," function can locally free\n");
1343 fprintf (dump_file
, "\n malloc state: %s\n",
1344 malloc_state_names
[fs
->malloc_state
]);
1348 lto_destroy_simple_input_block (file_data
,
1349 LTO_section_ipa_pure_const
,
1355 /* We only propagate across edges that can throw externally and their callee
1356 is not interposable. */
1359 ignore_edge_for_nothrow (struct cgraph_edge
*e
)
1361 if (!e
->can_throw_external
|| TREE_NOTHROW (e
->callee
->decl
))
1364 enum availability avail
;
1365 cgraph_node
*ultimate_target
1366 = e
->callee
->function_or_virtual_thunk_symbol (&avail
, e
->caller
);
1367 if (avail
<= AVAIL_INTERPOSABLE
|| TREE_NOTHROW (ultimate_target
->decl
))
1369 return ((opt_for_fn (e
->callee
->decl
, flag_non_call_exceptions
)
1370 && !e
->callee
->binds_to_current_def_p (e
->caller
))
1371 || !opt_for_fn (e
->caller
->decl
, flag_ipa_pure_const
)
1372 || !opt_for_fn (ultimate_target
->decl
, flag_ipa_pure_const
));
1375 /* Return true if NODE is self recursive function.
1376 Indirectly recursive functions appears as non-trivial strongly
1377 connected components, so we need to care about self recursion
1381 self_recursive_p (struct cgraph_node
*node
)
1383 struct cgraph_edge
*e
;
1384 for (e
= node
->callees
; e
; e
= e
->next_callee
)
1385 if (e
->callee
->function_symbol () == node
)
1390 /* Return true if N is cdtor that is not const or pure. In this case we may
1391 need to remove unreachable function if it is marked const/pure. */
1394 cdtor_p (cgraph_node
*n
, void *)
1396 if (DECL_STATIC_CONSTRUCTOR (n
->decl
) || DECL_STATIC_DESTRUCTOR (n
->decl
))
1397 return ((!TREE_READONLY (n
->decl
) && !DECL_PURE_P (n
->decl
))
1398 || DECL_LOOPING_CONST_OR_PURE_P (n
->decl
));
1402 /* Skip edges from and to nodes without ipa_pure_const enabled.
1403 Ignore not available symbols. */
1406 ignore_edge_for_pure_const (struct cgraph_edge
*e
)
1408 enum availability avail
;
1409 cgraph_node
*ultimate_target
1410 = e
->callee
->function_or_virtual_thunk_symbol (&avail
, e
->caller
);
1412 return (avail
<= AVAIL_INTERPOSABLE
1413 || !opt_for_fn (e
->caller
->decl
, flag_ipa_pure_const
)
1414 || !opt_for_fn (ultimate_target
->decl
,
1415 flag_ipa_pure_const
));
1418 /* Produce transitive closure over the callgraph and compute pure/const
1422 propagate_pure_const (void)
1424 struct cgraph_node
*node
;
1425 struct cgraph_node
*w
;
1426 struct cgraph_node
**order
=
1427 XCNEWVEC (struct cgraph_node
*, symtab
->cgraph_count
);
1430 struct ipa_dfs_info
* w_info
;
1431 bool remove_p
= false;
1434 order_pos
= ipa_reduced_postorder (order
, true,
1435 ignore_edge_for_pure_const
);
1438 cgraph_node::dump_cgraph (dump_file
);
1439 ipa_print_order (dump_file
, "reduced", order
, order_pos
);
1442 /* Propagate the local information through the call graph to produce
1443 the global information. All the nodes within a cycle will have
1444 the same info so we collapse cycles first. Then we can do the
1445 propagation in one pass from the leaves to the roots. */
1446 for (i
= 0; i
< order_pos
; i
++ )
1448 enum pure_const_state_e pure_const_state
= IPA_CONST
;
1449 bool looping
= false;
1456 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1457 fprintf (dump_file
, "Starting cycle\n");
1459 /* Find the worst state for any node in the cycle. */
1461 while (w
&& pure_const_state
!= IPA_NEITHER
)
1463 struct cgraph_edge
*e
;
1464 struct cgraph_edge
*ie
;
1466 struct ipa_ref
*ref
= NULL
;
1468 funct_state w_l
= funct_state_summaries
->get_create (w
);
1469 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1470 fprintf (dump_file
, " Visiting %s state:%s looping %i\n",
1472 pure_const_names
[w_l
->pure_const_state
],
1475 /* First merge in function body properties.
1476 We are safe to pass NULL as FROM and TO because we will take care
1477 of possible interposition when walking callees. */
1478 worse_state (&pure_const_state
, &looping
,
1479 w_l
->pure_const_state
, w_l
->looping
,
1481 if (pure_const_state
== IPA_NEITHER
)
1486 /* We consider recursive cycles as possibly infinite.
1487 This might be relaxed since infinite recursion leads to stack
1492 /* Now walk the edges and merge in callee properties. */
1493 for (e
= w
->callees
; e
&& pure_const_state
!= IPA_NEITHER
;
1496 enum availability avail
;
1497 struct cgraph_node
*y
= e
->callee
->
1498 function_or_virtual_thunk_symbol (&avail
,
1500 enum pure_const_state_e edge_state
= IPA_CONST
;
1501 bool edge_looping
= false;
1503 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1505 fprintf (dump_file
, " Call to %s",
1506 e
->callee
->dump_name ());
1508 if (avail
> AVAIL_INTERPOSABLE
)
1510 funct_state y_l
= funct_state_summaries
->get_create (y
);
1512 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1515 " state:%s looping:%i\n",
1516 pure_const_names
[y_l
->pure_const_state
],
1519 if (y_l
->pure_const_state
> IPA_PURE
1520 && e
->cannot_lead_to_return_p ())
1522 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1524 " Ignoring side effects"
1525 " -> pure, looping\n");
1526 edge_state
= IPA_PURE
;
1527 edge_looping
= true;
1531 edge_state
= y_l
->pure_const_state
;
1532 edge_looping
= y_l
->looping
;
1535 else if (special_builtin_state (&edge_state
, &edge_looping
,
1539 state_from_flags (&edge_state
, &edge_looping
,
1540 flags_from_decl_or_type (y
->decl
),
1541 e
->cannot_lead_to_return_p ());
1543 /* Merge the results with what we already know. */
1544 better_state (&edge_state
, &edge_looping
,
1545 w_l
->state_previously_known
,
1546 w_l
->looping_previously_known
);
1547 worse_state (&pure_const_state
, &looping
,
1548 edge_state
, edge_looping
, e
->caller
, e
->callee
);
1549 if (pure_const_state
== IPA_NEITHER
)
1553 /* Now process the indirect call. */
1554 for (ie
= w
->indirect_calls
;
1555 ie
&& pure_const_state
!= IPA_NEITHER
; ie
= ie
->next_callee
)
1557 enum pure_const_state_e edge_state
= IPA_CONST
;
1558 bool edge_looping
= false;
1560 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1561 fprintf (dump_file
, " Indirect call");
1562 state_from_flags (&edge_state
, &edge_looping
,
1563 ie
->indirect_info
->ecf_flags
,
1564 ie
->cannot_lead_to_return_p ());
1565 /* Merge the results with what we already know. */
1566 better_state (&edge_state
, &edge_looping
,
1567 w_l
->state_previously_known
,
1568 w_l
->looping_previously_known
);
1569 worse_state (&pure_const_state
, &looping
,
1570 edge_state
, edge_looping
, NULL
, NULL
);
1571 if (pure_const_state
== IPA_NEITHER
)
1575 /* And finally all loads and stores. */
1576 for (i
= 0; w
->iterate_reference (i
, ref
)
1577 && pure_const_state
!= IPA_NEITHER
; i
++)
1579 enum pure_const_state_e ref_state
= IPA_CONST
;
1580 bool ref_looping
= false;
1584 /* readonly reads are safe. */
1585 if (TREE_READONLY (ref
->referred
->decl
))
1587 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1588 fprintf (dump_file
, " nonreadonly global var read\n");
1589 ref_state
= IPA_PURE
;
1592 if (ref
->cannot_lead_to_return ())
1594 ref_state
= IPA_NEITHER
;
1595 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1596 fprintf (dump_file
, " global var write\n");
1603 better_state (&ref_state
, &ref_looping
,
1604 w_l
->state_previously_known
,
1605 w_l
->looping_previously_known
);
1606 worse_state (&pure_const_state
, &looping
,
1607 ref_state
, ref_looping
, NULL
, NULL
);
1608 if (pure_const_state
== IPA_NEITHER
)
1611 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1612 w
= w_info
->next_cycle
;
1614 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1615 fprintf (dump_file
, "Result %s looping %i\n",
1616 pure_const_names
[pure_const_state
],
1619 /* Find the worst state of can_free for any node in the cycle. */
1620 bool can_free
= false;
1622 while (w
&& !can_free
)
1624 struct cgraph_edge
*e
;
1625 funct_state w_l
= funct_state_summaries
->get (w
);
1628 || w
->get_availability () == AVAIL_INTERPOSABLE
1629 || w
->indirect_calls
)
1632 for (e
= w
->callees
; e
&& !can_free
; e
= e
->next_callee
)
1634 enum availability avail
;
1635 struct cgraph_node
*y
= e
->callee
->
1636 function_or_virtual_thunk_symbol (&avail
,
1639 if (avail
> AVAIL_INTERPOSABLE
)
1640 can_free
= funct_state_summaries
->get (y
)->can_free
;
1644 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1645 w
= w_info
->next_cycle
;
1648 /* Copy back the region's pure_const_state which is shared by
1649 all nodes in the region. */
1653 funct_state w_l
= funct_state_summaries
->get (w
);
1654 enum pure_const_state_e this_state
= pure_const_state
;
1655 bool this_looping
= looping
;
1657 w_l
->can_free
= can_free
;
1658 w
->nonfreeing_fn
= !can_free
;
1659 if (!can_free
&& dump_file
)
1660 fprintf (dump_file
, "Function found not to call free: %s\n",
1663 if (w_l
->state_previously_known
!= IPA_NEITHER
1664 && this_state
> w_l
->state_previously_known
)
1666 this_state
= w_l
->state_previously_known
;
1667 if (this_state
== IPA_NEITHER
)
1668 this_looping
= w_l
->looping_previously_known
;
1670 if (!this_looping
&& self_recursive_p (w
))
1671 this_looping
= true;
1672 if (!w_l
->looping_previously_known
)
1673 this_looping
= false;
1675 /* All nodes within a cycle share the same info. */
1676 w_l
->pure_const_state
= this_state
;
1677 w_l
->looping
= this_looping
;
1679 /* Inline clones share declaration with their offline copies;
1680 do not modify their declarations since the offline copy may
1686 if (!TREE_READONLY (w
->decl
))
1688 warn_function_const (w
->decl
, !this_looping
);
1690 fprintf (dump_file
, "Function found to be %sconst: %s\n",
1691 this_looping
? "looping " : "",
1694 /* Turning constructor or destructor to non-looping const/pure
1695 enables us to possibly remove the function completely. */
1699 has_cdtor
= w
->call_for_symbol_and_aliases (cdtor_p
,
1701 if (w
->set_const_flag (true, this_looping
))
1705 "Declaration updated to be %sconst: %s\n",
1706 this_looping
? "looping " : "",
1708 remove_p
|= has_cdtor
;
1713 if (!DECL_PURE_P (w
->decl
))
1715 warn_function_pure (w
->decl
, !this_looping
);
1717 fprintf (dump_file
, "Function found to be %spure: %s\n",
1718 this_looping
? "looping " : "",
1724 has_cdtor
= w
->call_for_symbol_and_aliases (cdtor_p
,
1726 if (w
->set_pure_flag (true, this_looping
))
1730 "Declaration updated to be %spure: %s\n",
1731 this_looping
? "looping " : "",
1733 remove_p
|= has_cdtor
;
1740 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1741 w
= w_info
->next_cycle
;
1745 ipa_free_postorder_info ();
1750 /* Produce transitive closure over the callgraph and compute nothrow
1754 propagate_nothrow (void)
1756 struct cgraph_node
*node
;
1757 struct cgraph_node
*w
;
1758 struct cgraph_node
**order
=
1759 XCNEWVEC (struct cgraph_node
*, symtab
->cgraph_count
);
1762 struct ipa_dfs_info
* w_info
;
1764 order_pos
= ipa_reduced_postorder (order
, true,
1765 ignore_edge_for_nothrow
);
1768 cgraph_node::dump_cgraph (dump_file
);
1769 ipa_print_order (dump_file
, "reduced for nothrow", order
, order_pos
);
1772 /* Propagate the local information through the call graph to produce
1773 the global information. All the nodes within a cycle will have
1774 the same info so we collapse cycles first. Then we can do the
1775 propagation in one pass from the leaves to the roots. */
1776 for (i
= 0; i
< order_pos
; i
++ )
1778 bool can_throw
= false;
1784 /* Find the worst state for any node in the cycle. */
1786 while (w
&& !can_throw
)
1788 struct cgraph_edge
*e
, *ie
;
1790 if (!TREE_NOTHROW (w
->decl
))
1792 funct_state w_l
= funct_state_summaries
->get_create (w
);
1795 || w
->get_availability () == AVAIL_INTERPOSABLE
)
1798 for (e
= w
->callees
; e
&& !can_throw
; e
= e
->next_callee
)
1800 enum availability avail
;
1802 if (!e
->can_throw_external
|| TREE_NOTHROW (e
->callee
->decl
))
1805 struct cgraph_node
*y
= e
->callee
->
1806 function_or_virtual_thunk_symbol (&avail
,
1809 /* We can use info about the callee only if we know it
1810 cannot be interposed.
1811 When callee is compiled with non-call exceptions we also
1812 must check that the declaration is bound to current
1813 body as other semantically equivalent body may still
1815 if (avail
<= AVAIL_INTERPOSABLE
1816 || (!TREE_NOTHROW (y
->decl
)
1817 && (funct_state_summaries
->get_create (y
)->can_throw
1818 || (opt_for_fn (y
->decl
, flag_non_call_exceptions
)
1819 && !e
->callee
->binds_to_current_def_p (w
)))))
1822 for (ie
= w
->indirect_calls
; ie
&& !can_throw
;
1823 ie
= ie
->next_callee
)
1824 if (ie
->can_throw_external
1825 && !(ie
->indirect_info
->ecf_flags
& ECF_NOTHROW
))
1828 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1829 w
= w_info
->next_cycle
;
1832 /* Copy back the region's pure_const_state which is shared by
1833 all nodes in the region. */
1837 funct_state w_l
= funct_state_summaries
->get_create (w
);
1838 if (!can_throw
&& !TREE_NOTHROW (w
->decl
))
1840 /* Inline clones share declaration with their offline copies;
1841 do not modify their declarations since the offline copy may
1845 w
->set_nothrow_flag (true);
1847 fprintf (dump_file
, "Function found to be nothrow: %s\n",
1851 else if (can_throw
&& !TREE_NOTHROW (w
->decl
))
1852 w_l
->can_throw
= true;
1853 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1854 w
= w_info
->next_cycle
;
1858 ipa_free_postorder_info ();
1862 /* Debugging function to dump state of malloc lattice. */
1866 dump_malloc_lattice (FILE *dump_file
, const char *s
)
1871 fprintf (dump_file
, "\n\nMALLOC LATTICE %s:\n", s
);
1873 FOR_EACH_FUNCTION (node
)
1875 funct_state fs
= funct_state_summaries
->get (node
);
1877 fprintf (dump_file
, "%s: %s\n", node
->dump_name (),
1878 malloc_state_names
[fs
->malloc_state
]);
1882 /* Propagate malloc attribute across the callgraph. */
1885 propagate_malloc (void)
1888 FOR_EACH_FUNCTION (node
)
1890 if (DECL_IS_MALLOC (node
->decl
))
1891 if (!funct_state_summaries
->exists (node
))
1893 funct_state fs
= funct_state_summaries
->get_create (node
);
1894 fs
->malloc_state
= STATE_MALLOC
;
1898 dump_malloc_lattice (dump_file
, "Initial");
1899 struct cgraph_node
**order
1900 = XNEWVEC (struct cgraph_node
*, symtab
->cgraph_count
);
1901 int order_pos
= ipa_reverse_postorder (order
);
1902 bool changed
= true;
1907 /* Walk in postorder. */
1908 for (int i
= order_pos
- 1; i
>= 0; --i
)
1910 cgraph_node
*node
= order
[i
];
1912 || !node
->definition
1913 || !funct_state_summaries
->exists (node
))
1916 funct_state l
= funct_state_summaries
->get (node
);
1918 /* FIXME: add support for indirect-calls. */
1919 if (node
->indirect_calls
)
1921 l
->malloc_state
= STATE_MALLOC_BOTTOM
;
1925 if (node
->get_availability () <= AVAIL_INTERPOSABLE
)
1927 l
->malloc_state
= STATE_MALLOC_BOTTOM
;
1931 if (l
->malloc_state
== STATE_MALLOC_BOTTOM
)
1934 vec
<cgraph_node
*> callees
= vNULL
;
1935 for (cgraph_edge
*cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
1937 ipa_call_summary
*es
= ipa_call_summaries
->get_create (cs
);
1938 if (es
&& es
->is_return_callee_uncaptured
)
1939 callees
.safe_push (cs
->callee
);
1942 malloc_state_e new_state
= l
->malloc_state
;
1943 for (unsigned j
= 0; j
< callees
.length (); j
++)
1945 cgraph_node
*callee
= callees
[j
];
1946 if (!funct_state_summaries
->exists (node
))
1948 new_state
= STATE_MALLOC_BOTTOM
;
1951 malloc_state_e callee_state
1952 = funct_state_summaries
->get_create (callee
)->malloc_state
;
1953 if (new_state
< callee_state
)
1954 new_state
= callee_state
;
1956 if (new_state
!= l
->malloc_state
)
1959 l
->malloc_state
= new_state
;
1964 FOR_EACH_DEFINED_FUNCTION (node
)
1965 if (funct_state_summaries
->exists (node
))
1967 funct_state l
= funct_state_summaries
->get (node
);
1969 && l
->malloc_state
== STATE_MALLOC
1970 && !node
->inlined_to
)
1972 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1973 fprintf (dump_file
, "Function %s found to be malloc\n",
1974 node
->dump_name ());
1976 bool malloc_decl_p
= DECL_IS_MALLOC (node
->decl
);
1977 node
->set_malloc_flag (true);
1978 if (!malloc_decl_p
&& warn_suggest_attribute_malloc
)
1979 warn_function_malloc (node
->decl
);
1983 dump_malloc_lattice (dump_file
, "after propagation");
1984 ipa_free_postorder_info ();
1988 /* Produce the global information by preforming a transitive closure
1989 on the local information that was produced by generate_summary. */
1992 pass_ipa_pure_const::
1993 execute (function
*)
1997 /* Nothrow makes more function to not lead to return and improve
1999 propagate_nothrow ();
2000 propagate_malloc ();
2001 remove_p
= propagate_pure_const ();
2003 delete funct_state_summaries
;
2004 return remove_p
? TODO_remove_functions
: 0;
2008 gate_pure_const (void)
2010 return flag_ipa_pure_const
|| in_lto_p
;
2013 pass_ipa_pure_const::pass_ipa_pure_const(gcc::context
*ctxt
)
2014 : ipa_opt_pass_d(pass_data_ipa_pure_const
, ctxt
,
2015 pure_const_generate_summary
, /* generate_summary */
2016 pure_const_write_summary
, /* write_summary */
2017 pure_const_read_summary
, /* read_summary */
2018 NULL
, /* write_optimization_summary */
2019 NULL
, /* read_optimization_summary */
2020 NULL
, /* stmt_fixup */
2021 0, /* function_transform_todo_flags_start */
2022 NULL
, /* function_transform */
2023 NULL
), /* variable_transform */
2027 make_pass_ipa_pure_const (gcc::context
*ctxt
)
2029 return new pass_ipa_pure_const (ctxt
);
2032 /* Return true if function should be skipped for local pure const analysis. */
2035 skip_function_for_local_pure_const (struct cgraph_node
*node
)
2037 /* Because we do not schedule pass_fixup_cfg over whole program after early
2038 optimizations we must not promote functions that are called by already
2039 processed functions. */
2041 if (function_called_by_processed_nodes_p ())
2044 fprintf (dump_file
, "Function called in recursive cycle; ignoring\n");
2047 /* Save some work and do not analyze functions which are interposable and
2048 do not have any non-interposable aliases. */
2049 if (node
->get_availability () <= AVAIL_INTERPOSABLE
2050 && !node
->has_aliases_p ())
2054 "Function is interposable; not analyzing.\n");
2060 /* Simple local pass for pure const discovery reusing the analysis from
2061 ipa_pure_const. This pass is effective when executed together with
2062 other optimization passes in early optimization pass queue. */
2066 const pass_data pass_data_local_pure_const
=
2068 GIMPLE_PASS
, /* type */
2069 "local-pure-const", /* name */
2070 OPTGROUP_NONE
, /* optinfo_flags */
2071 TV_IPA_PURE_CONST
, /* tv_id */
2072 0, /* properties_required */
2073 0, /* properties_provided */
2074 0, /* properties_destroyed */
2075 0, /* todo_flags_start */
2076 0, /* todo_flags_finish */
2079 class pass_local_pure_const
: public gimple_opt_pass
2082 pass_local_pure_const (gcc::context
*ctxt
)
2083 : gimple_opt_pass (pass_data_local_pure_const
, ctxt
)
2086 /* opt_pass methods: */
2087 opt_pass
* clone () { return new pass_local_pure_const (m_ctxt
); }
2088 virtual bool gate (function
*) { return gate_pure_const (); }
2089 virtual unsigned int execute (function
*);
2091 }; // class pass_local_pure_const
2094 pass_local_pure_const::execute (function
*fun
)
2096 bool changed
= false;
2099 struct cgraph_node
*node
;
2101 node
= cgraph_node::get (current_function_decl
);
2102 skip
= skip_function_for_local_pure_const (node
);
2104 if (!warn_suggest_attribute_const
2105 && !warn_suggest_attribute_pure
2109 l
= analyze_function (node
, false);
2111 /* Do NORETURN discovery. */
2112 if (!skip
&& !TREE_THIS_VOLATILE (current_function_decl
)
2113 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) == 0)
2115 warn_function_noreturn (fun
->decl
);
2117 fprintf (dump_file
, "Function found to be noreturn: %s\n",
2118 current_function_name ());
2120 /* Update declaration and reduce profile to executed once. */
2121 TREE_THIS_VOLATILE (current_function_decl
) = 1;
2122 if (node
->frequency
> NODE_FREQUENCY_EXECUTED_ONCE
)
2123 node
->frequency
= NODE_FREQUENCY_EXECUTED_ONCE
;
2128 switch (l
->pure_const_state
)
2131 if (!TREE_READONLY (current_function_decl
))
2133 warn_function_const (current_function_decl
, !l
->looping
);
2135 fprintf (dump_file
, "Function found to be %sconst: %s\n",
2136 l
->looping
? "looping " : "",
2137 current_function_name ());
2139 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl
)
2143 fprintf (dump_file
, "Function found to be non-looping: %s\n",
2144 current_function_name ());
2146 if (!skip
&& node
->set_const_flag (true, l
->looping
))
2149 fprintf (dump_file
, "Declaration updated to be %sconst: %s\n",
2150 l
->looping
? "looping " : "",
2151 current_function_name ());
2157 if (!DECL_PURE_P (current_function_decl
))
2159 warn_function_pure (current_function_decl
, !l
->looping
);
2161 fprintf (dump_file
, "Function found to be %spure: %s\n",
2162 l
->looping
? "looping " : "",
2163 current_function_name ());
2165 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl
)
2169 fprintf (dump_file
, "Function found to be non-looping: %s\n",
2170 current_function_name ());
2172 if (!skip
&& node
->set_pure_flag (true, l
->looping
))
2175 fprintf (dump_file
, "Declaration updated to be %spure: %s\n",
2176 l
->looping
? "looping " : "",
2177 current_function_name ());
2185 if (!l
->can_throw
&& !TREE_NOTHROW (current_function_decl
))
2187 node
->set_nothrow_flag (true);
2190 fprintf (dump_file
, "Function found to be nothrow: %s\n",
2191 current_function_name ());
2194 if (l
->malloc_state
== STATE_MALLOC
2195 && !DECL_IS_MALLOC (current_function_decl
))
2197 node
->set_malloc_flag (true);
2198 if (warn_suggest_attribute_malloc
)
2199 warn_function_malloc (node
->decl
);
2202 fprintf (dump_file
, "Function found to be malloc: %s\n",
2203 node
->dump_name ());
2208 return execute_fixup_cfg ();
2216 make_pass_local_pure_const (gcc::context
*ctxt
)
2218 return new pass_local_pure_const (ctxt
);
2221 /* Emit noreturn warnings. */
2225 const pass_data pass_data_warn_function_noreturn
=
2227 GIMPLE_PASS
, /* type */
2228 "*warn_function_noreturn", /* name */
2229 OPTGROUP_NONE
, /* optinfo_flags */
2230 TV_NONE
, /* tv_id */
2231 PROP_cfg
, /* properties_required */
2232 0, /* properties_provided */
2233 0, /* properties_destroyed */
2234 0, /* todo_flags_start */
2235 0, /* todo_flags_finish */
2238 class pass_warn_function_noreturn
: public gimple_opt_pass
2241 pass_warn_function_noreturn (gcc::context
*ctxt
)
2242 : gimple_opt_pass (pass_data_warn_function_noreturn
, ctxt
)
2245 /* opt_pass methods: */
2246 virtual bool gate (function
*) { return warn_suggest_attribute_noreturn
; }
2247 virtual unsigned int execute (function
*fun
)
2249 if (!TREE_THIS_VOLATILE (current_function_decl
)
2250 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) == 0)
2251 warn_function_noreturn (current_function_decl
);
2255 }; // class pass_warn_function_noreturn
2260 make_pass_warn_function_noreturn (gcc::context
*ctxt
)
2262 return new pass_warn_function_noreturn (ctxt
);
2265 /* Simple local pass for pure const discovery reusing the analysis from
2266 ipa_pure_const. This pass is effective when executed together with
2267 other optimization passes in early optimization pass queue. */
2271 const pass_data pass_data_nothrow
=
2273 GIMPLE_PASS
, /* type */
2274 "nothrow", /* name */
2275 OPTGROUP_NONE
, /* optinfo_flags */
2276 TV_IPA_PURE_CONST
, /* tv_id */
2277 0, /* properties_required */
2278 0, /* properties_provided */
2279 0, /* properties_destroyed */
2280 0, /* todo_flags_start */
2281 0, /* todo_flags_finish */
2284 class pass_nothrow
: public gimple_opt_pass
2287 pass_nothrow (gcc::context
*ctxt
)
2288 : gimple_opt_pass (pass_data_nothrow
, ctxt
)
2291 /* opt_pass methods: */
2292 opt_pass
* clone () { return new pass_nothrow (m_ctxt
); }
2293 virtual bool gate (function
*) { return optimize
; }
2294 virtual unsigned int execute (function
*);
2296 }; // class pass_nothrow
2299 pass_nothrow::execute (function
*)
2301 struct cgraph_node
*node
;
2302 basic_block this_block
;
2304 if (TREE_NOTHROW (current_function_decl
))
2307 node
= cgraph_node::get (current_function_decl
);
2309 /* We run during lowering, we cannot really use availability yet. */
2310 if (cgraph_node::get (current_function_decl
)->get_availability ()
2311 <= AVAIL_INTERPOSABLE
)
2314 fprintf (dump_file
, "Function is interposable;"
2315 " not analyzing.\n");
2319 FOR_EACH_BB_FN (this_block
, cfun
)
2321 for (gimple_stmt_iterator gsi
= gsi_start_bb (this_block
);
2324 if (stmt_can_throw_external (cfun
, gsi_stmt (gsi
)))
2326 if (is_gimple_call (gsi_stmt (gsi
)))
2328 tree callee_t
= gimple_call_fndecl (gsi_stmt (gsi
));
2329 if (callee_t
&& recursive_call_p (current_function_decl
,
2336 fprintf (dump_file
, "Statement can throw: ");
2337 print_gimple_stmt (dump_file
, gsi_stmt (gsi
), 0);
2343 node
->set_nothrow_flag (true);
2345 bool cfg_changed
= false;
2346 if (self_recursive_p (node
))
2347 FOR_EACH_BB_FN (this_block
, cfun
)
2348 if (gimple
*g
= last_stmt (this_block
))
2349 if (is_gimple_call (g
))
2351 tree callee_t
= gimple_call_fndecl (g
);
2353 && recursive_call_p (current_function_decl
, callee_t
)
2354 && maybe_clean_eh_stmt (g
)
2355 && gimple_purge_dead_eh_edges (this_block
))
2360 fprintf (dump_file
, "Function found to be nothrow: %s\n",
2361 current_function_name ());
2362 return cfg_changed
? TODO_cleanup_cfg
: 0;
2368 make_pass_nothrow (gcc::context
*ctxt
)
2370 return new pass_nothrow (ctxt
);