/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with the post inlined state.  The global sets are not prone
   to this problem since they are by definition transitive.  */
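/* For illustration only (this example is not part of the sources): a
   function such as

     int add (int a, int b) { return a + b; }

   depends only on its arguments and can be marked const, while

     int load (const int *p) { return *p; }

   reads memory that may change between calls, so the best it can be is
   pure.  */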
/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "diagnostic.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-scalar-evolution.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-fnsummary.h"
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is discovered.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

static const char *pure_const_names[3] = {"const", "pure", "neither"};

enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  funct_state_d (): pure_const_state (IPA_NEITHER),
    state_previously_known (IPA_NEITHER), looping_previously_known (true),
    looping (true), can_throw (true), can_free (true),
    malloc_state (STATE_MALLOC_BOTTOM) {}

  funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
    state_previously_known (s.state_previously_known),
    looping_previously_known (s.looping_previously_known),
    looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
    malloc_state (s.malloc_state) {}

  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;

  enum malloc_state_e malloc_state;
};

typedef struct funct_state_d * funct_state;
/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

class funct_state_summary_t: public function_summary <funct_state_d *>
{
public:
  funct_state_summary_t (symbol_table *symtab):
    function_summary <funct_state_d *> (symtab) {}

  virtual void insert (cgraph_node *, funct_state_d *state);
  virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
			  funct_state_d *src_data,
			  funct_state_d *dst_data);
};

static funct_state_summary_t *funct_state_summaries = NULL;

static bool gate_pure_const (void);
namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const (gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;
}; // class pass_ipa_pure_const

} // anon namespace
/* Try to guess if function body will always be visible to compiler
   when compiling the call and whether compiler will be able
   to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
	  || DECL_COMDAT (decl));
}
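/* For example (illustration only, not part of the sources): a call to

     static inline int sq (int x) { return x * x; }

   always resolves to this very body, so each caller can rediscover its
   purity by itself; a public, non-inline, non-comdat function gives the
   compiler no such guarantee.  */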
/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION, this function may initialize it and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? G_("function might be candidate for attribute %qs")
	      : G_("function might be candidate for attribute %qs"
		   " if it is known to return normally"), attrib_name);
  return warned_about;
}
/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  /* Declaring a void function pure makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about, "pure");
}
/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  /* Declaring a void function const makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
			 known_finite, warned_about, "const");
}
/* Emit suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
			 true, warned_about, "malloc");
}
/* Emit suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
			   true, warned_about, "noreturn");
}
/* Emit suggestion about __attribute__((cold)) for DECL.  */

void
warn_function_cold (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
			 true, warned_about, "cold");
}
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "    global memory read is not const\n");
	  /* Just a regular read.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of operand T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
/* Compute state based on ECF FLAGS and store to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
		  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
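/* Illustrative summary (not in the original sources): ECF_CONST maps to
   IPA_CONST, ECF_PURE to IPA_PURE, and a call that cannot lead to return
   (e.g. a noreturn callee) is treated as pure-looping, since its side
   effects cannot matter on paths that never come back.  */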
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   the better of the two variants into STATE and LOOPING.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   the worse of the two variants into STATE and LOOPING.
   FROM is the caller and TO the node actually called.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now this function will be detected as CONST; however, when interposed
     it may end up being just pure.  We always must assume the worst
     scenario here.  */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->name ());
      state2 = IPA_PURE;
    }
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
/* Recognize special cases of builtins that are by themselves not pure or const
   but the function using them is.  */

static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
		       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
	*looping = false;
	*state = IPA_CONST;
	return true;
      case BUILT_IN_PREFETCH:
	*looping = true;
	*state = IPA_CONST;
	return true;
      default:
	break;
      }
  return false;
}
/* Check the parameters of function call CALL to see if there are any
   references in the parameters that are not allowed for pure or const
   functions.  Also check to see if this is either an indirect call, a
   call outside the compilation unit, or has special attributes that may
   also affect the purity.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (call));

  if (possibly_throws || possibly_throws_externally)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
	if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
	fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct function calls are handled by IPA propagation.  */
}
/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}
/* Look at the statement pointed to by GSIP and figure out what interesting
   side effects it has.  */

static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers to be optimized more.  This is a
     common case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      break;
    default:
      break;
    }
}
/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is return value of the function and STMT is return stmt.  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
	tree op2 = gimple_cond_rhs (cond);
	if (!integer_zerop (op2))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
	enum tree_code code = gimple_assign_rhs_code (ga);
	if (TREE_CODE_CLASS (code) != tcc_comparison)
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
	if (!integer_zerop (gimple_assign_rhs2 (ga)))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      RETURN_FROM_IMM_USE_STMT (use_iter, false);

  return true;
}
/* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
   attribute.  Currently this function does a very conservative analysis.
   FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and element of phi is either NULL or
      SSA_NAME_DEF_STMT(element) is function call.
   3) The return value has immediate uses only within comparisons (gcond or
      gassign) and the return stmt (and likewise a phi arg has immediate use
      only within a comparison or the phi stmt).  */
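/* For illustration only (this example is not part of the sources): a
   wrapper of the form

     void *my_alloc (size_t n)
     {
       void *p = malloc (n);
       if (!p)
	 abort ();
       return p;
     }

   satisfies all three criteria: the function returns a pointer, the
   returned SSA name is defined by a call, and its only other use is the
   comparison against 0.  */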
static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
	     (node->name()), (reason));  \
  return false;  \
}

  if (EDGE_COUNT (exit_block->preds) == 0
      || !flag_delete_null_pointer_checks)
    return false;

  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));
      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!check_retval_uses (retval, ret_stmt))
	DUMP_AND_RETURN("Return value has uses outside return stmt"
			" and comparisons against 0.")

      gimple *def = SSA_NAME_DEF_STMT (retval);
      if (gcall *call_stmt = dyn_cast<gcall *> (def))
	{
	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;

	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			    " non-ipa mode.")

	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      else if (gphi *phi = dyn_cast<gphi *> (def))
	{
	  bool all_args_zero = true;
	  for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      if (integer_zerop (arg))
		continue;

	      all_args_zero = false;
	      if (TREE_CODE (arg) != SSA_NAME)
		DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
	      if (!check_retval_uses (arg, phi))
		DUMP_AND_RETURN ("phi arg has uses outside phi"
				 " and comparisons against 0.")

	      gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	      gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	      if (!call_stmt)
		return false;
	      tree callee_decl = gimple_call_fndecl (call_stmt);
	      if (!callee_decl)
		return false;
	      if (!ipa && !DECL_IS_MALLOC (callee_decl))
		DUMP_AND_RETURN("callee_decl does not have malloc attribute"
				" for non-ipa mode.")

	      cgraph_edge *cs = node->get_edge (call_stmt);
	      if (cs)
		{
		  ipa_call_summary *es = ipa_call_summaries->get_create (cs);
		  es->is_return_callee_uncaptured = true;
		}
	    }

	  if (all_args_zero)
	    DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.");
	}

      else
	DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;
}

#undef DUMP_AND_RETURN
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    fprintf (dump_file, "\n\n local analysis of %s\n ",
	     fn->name ());

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of possible infinite loop side
	 effect).  */
      if (mark_dfs_back_edges ())
	{
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that loop will
	     be proved to be finite in testcases such as in loop-15.c.  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
		fprintf (dump_file, "    has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, "    can not prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping = true;
		    break;
		  }
	      scev_finalize ();
	    }
	  loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
	fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
	fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
	fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
	fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}
void
funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
{
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  if (opt_for_fn (node->decl, flag_ipa_pure_const))
    {
      funct_state_d *a = analyze_function (node, true);
      new (state) funct_state_d (*a);
      free (a);
    }
}
/* Called when new clone is inserted to callgraph late.  */

void
funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *,
				  funct_state_d *src_data,
				  funct_state_d *dst_data)
{
  new (dst_data) funct_state_d (*src_data);
}
void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  funct_state_summaries = new funct_state_summary_t (symtab);
}
/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We can not use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      {
	funct_state_d *a = analyze_function (node, true);
	new (funct_state_summaries->get_create (node)) funct_state_d (*a);
	free (a);
      }
}
/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && funct_state_summaries->exists (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      funct_state_d *fs = funct_state_summaries->get (node);
      if (node->definition && fs != NULL)
	{
	  struct bitpack_d bp;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));

	      fs = funct_state_summaries->get_create (node);
	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
			= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file,"  function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file,"  function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file,"  function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file,"  function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
/* We only propagate across edges that can throw externally and whose
   callee is not interposable.  */

static bool
ignore_edge_for_nothrow (struct cgraph_edge *e)
{
  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
    return true;

  enum availability avail;
  cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
								e->caller);
  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
    return true;
  return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
	 && !e->callee->binds_to_current_def_p (e->caller);
}
/* Return true if NODE is a self recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}
/* Return true if N is cdtor that is not const or pure.  In this case we may
   need to remove unreachable function if it is marked const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
	    || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
  return false;
}
/* We only propagate across edges with non-interposable callee.  */

static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
  enum availability avail;
  e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  return (avail <= AVAIL_INTERPOSABLE);
}
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
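  /* Illustrative note (not in the original sources): two mutually
     recursive functions

       int f (int n) { return n ? g (n - 1) : 0; }
       int g (int n) { return n ? f (n - 1) : 1; }

     form a single strongly connected component, so the loop below treats
     them as one cycle and both receive the same final pure/const state.  */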
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
		     w->dump_name (),
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.
	     We are safe to pass NULL as FROM and TO because we will take care
	     of possible interposition when walking callees.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping,
		       NULL, NULL);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;
	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
	       e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Call to %s",
			 e->callee->dump_name ());
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = funct_state_summaries->get (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     " state:%s looping:%i\n",
			     pure_const_names[y_l->pure_const_state],
			     y_l->looping);
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					      y->decl))
		;
	      else
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, e->caller, e->callee);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  /* Now process the indirect call.  */
	  for (ie = w->indirect_calls;
	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Indirect call");
	      state_from_flags (&edge_state, &edge_looping,
				ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref)
	       && pure_const_state != IPA_NEITHER; i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names[pure_const_state],
		 looping);
      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = funct_state_summaries->get (w);

	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = funct_state_summaries->get (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
1616 all nodes in the region. */
1620 funct_state w_l
= funct_state_summaries
->get (w
);
1621 enum pure_const_state_e this_state
= pure_const_state
;
1622 bool this_looping
= looping
;
1624 w_l
->can_free
= can_free
;
1625 w
->nonfreeing_fn
= !can_free
;
1626 if (!can_free
&& dump_file
)
1627 fprintf (dump_file
, "Function found not to call free: %s\n",
1630 if (w_l
->state_previously_known
!= IPA_NEITHER
1631 && this_state
> w_l
->state_previously_known
)
1633 this_state
= w_l
->state_previously_known
;
1634 if (this_state
== IPA_NEITHER
)
1635 this_looping
= w_l
->looping_previously_known
;
1637 if (!this_looping
&& self_recursive_p (w
))
1638 this_looping
= true;
1639 if (!w_l
->looping_previously_known
)
1640 this_looping
= false;
1642 /* All nodes within a cycle share the same info. */
1643 w_l
->pure_const_state
= this_state
;
1644 w_l
->looping
= this_looping
;
1646 /* Inline clones share declaration with their offline copies;
1647 do not modify their declarations since the offline copy may
1649 if (!w
->global
.inlined_to
)
1653 if (!TREE_READONLY (w
->decl
))
1655 warn_function_const (w
->decl
, !this_looping
);
1657 fprintf (dump_file
, "Function found to be %sconst: %s\n",
1658 this_looping
? "looping " : "",
1661 /* Turning constructor or destructor to non-looping const/pure
1662 enables us to possibly remove the function completely. */
1666 has_cdtor
= w
->call_for_symbol_and_aliases (cdtor_p
,
1668 if (w
->set_const_flag (true, this_looping
))
1672 "Declaration updated to be %sconst: %s\n",
1673 this_looping
? "looping " : "",
1675 remove_p
|= has_cdtor
;
1680 if (!DECL_PURE_P (w
->decl
))
1682 warn_function_pure (w
->decl
, !this_looping
);
1684 fprintf (dump_file
, "Function found to be %spure: %s\n",
1685 this_looping
? "looping " : "",
1691 has_cdtor
= w
->call_for_symbol_and_aliases (cdtor_p
,
1693 if (w
->set_pure_flag (true, this_looping
))
1697 "Declaration updated to be %spure: %s\n",
1698 this_looping
? "looping " : "",
1700 remove_p
|= has_cdtor
;
1707 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1708 w
= w_info
->next_cycle
;
1712 ipa_free_postorder_info ();
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = funct_state_summaries->get_create (w);

	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it can
		     not be interposed.
		     When callee is compiled with non-call exceptions we also
		     must check that the declaration is bound to current
		     body as other semantically equivalent body may still
		     throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (funct_state_summaries->get_create (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->global.inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
/* Debugging function to dump state of malloc lattice.  */

static void
dump_malloc_lattice (FILE *dump_file, const char *s)
{
  if (!dump_file)
    return;

  fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      funct_state fs = funct_state_summaries->get (node);
      if (fs)
	fprintf (dump_file, "%s: %s\n", node->name (),
		 malloc_state_names[fs->malloc_state]);
    }
}
1852 propagate_malloc (void)
1855 FOR_EACH_FUNCTION (node
)
1857 if (DECL_IS_MALLOC (node
->decl
))
1858 if (!funct_state_summaries
->exists (node
))
1860 funct_state fs
= funct_state_summaries
->get_create (node
);
1861 fs
->malloc_state
= STATE_MALLOC
;
1865 dump_malloc_lattice (dump_file
, "Initial");
1866 struct cgraph_node
**order
1867 = XNEWVEC (struct cgraph_node
*, symtab
->cgraph_count
);
1868 int order_pos
= ipa_reverse_postorder (order
);
1869 bool changed
= true;
1874 /* Walk in postorder. */
1875 for (int i
= order_pos
- 1; i
>= 0; --i
)
1877 cgraph_node
*node
= order
[i
];
1879 || !node
->definition
1880 || !funct_state_summaries
->exists (node
))
1883 funct_state l
= funct_state_summaries
->get (node
);
1885 /* FIXME: add support for indirect-calls. */
1886 if (node
->indirect_calls
)
1888 l
->malloc_state
= STATE_MALLOC_BOTTOM
;
1892 if (node
->get_availability () <= AVAIL_INTERPOSABLE
)
1894 l
->malloc_state
= STATE_MALLOC_BOTTOM
;
1898 if (l
->malloc_state
== STATE_MALLOC_BOTTOM
)
1901 vec
<cgraph_node
*> callees
= vNULL
;
1902 for (cgraph_edge
*cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
1904 ipa_call_summary
*es
= ipa_call_summaries
->get_create (cs
);
1905 if (es
&& es
->is_return_callee_uncaptured
)
1906 callees
.safe_push (cs
->callee
);
1909 malloc_state_e new_state
= l
->malloc_state
;
1910 for (unsigned j
= 0; j
< callees
.length (); j
++)
1912 cgraph_node
*callee
= callees
[j
];
1913 if (!funct_state_summaries
->exists (node
))
1915 new_state
= STATE_MALLOC_BOTTOM
;
1918 malloc_state_e callee_state
1919 = funct_state_summaries
->get_create (callee
)->malloc_state
;
1920 if (new_state
< callee_state
)
1921 new_state
= callee_state
;
1923 if (new_state
!= l
->malloc_state
)
1926 l
->malloc_state
= new_state
;
1931 FOR_EACH_DEFINED_FUNCTION (node
)
1932 if (funct_state_summaries
->exists (node
))
1934 funct_state l
= funct_state_summaries
->get (node
);
1936 && l
->malloc_state
== STATE_MALLOC
1937 && !node
->global
.inlined_to
)
1939 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1940 fprintf (dump_file
, "Function %s found to be malloc\n",
1943 bool malloc_decl_p
= DECL_IS_MALLOC (node
->decl
);
1944 node
->set_malloc_flag (true);
1945 if (!malloc_decl_p
&& warn_suggest_attribute_malloc
)
1946 warn_function_malloc (node
->decl
);
1950 dump_malloc_lattice (dump_file
, "after propagation");
1951 ipa_free_postorder_info ();
1955 /* Produce the global information by preforming a transitive closure
1956 on the local information that was produced by generate_summary. */
1959 pass_ipa_pure_const::
1960 execute (function
*)
1964 /* Nothrow makes more function to not lead to return and improve
1966 propagate_nothrow ();
1967 propagate_malloc ();
1968 remove_p
= propagate_pure_const ();
1970 delete funct_state_summaries
;
1971 return remove_p
? TODO_remove_functions
: 0;
static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}
pass_ipa_pure_const::pass_ipa_pure_const (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_pure_const, ctxt,
		      pure_const_generate_summary, /* generate_summary */
		      pure_const_write_summary, /* write_summary */
		      pure_const_read_summary, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL), /* variable_transform */
  init_p (false)
{
}
ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}

/* Return true if function should be skipped for local pure const analysis.  */
static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over whole program after early
     optimizations we must not promote functions that are called by already
     processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable and
     do not have any non-interposable aliases.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !node->has_aliases_p ())
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}
2028 ipa_pure_const. This pass is effective when executed together with
2029 other optimization passes in early optimization pass queue. */
2033 const pass_data pass_data_local_pure_const
=
2035 GIMPLE_PASS
, /* type */
2036 "local-pure-const", /* name */
2037 OPTGROUP_NONE
, /* optinfo_flags */
2038 TV_IPA_PURE_CONST
, /* tv_id */
2039 0, /* properties_required */
2040 0, /* properties_provided */
2041 0, /* properties_destroyed */
2042 0, /* todo_flags_start */
2043 0, /* todo_flags_finish */
2046 class pass_local_pure_const
: public gimple_opt_pass
2049 pass_local_pure_const (gcc::context
*ctxt
)
2050 : gimple_opt_pass (pass_data_local_pure_const
, ctxt
)
2053 /* opt_pass methods: */
2054 opt_pass
* clone () { return new pass_local_pure_const (m_ctxt
); }
2055 virtual bool gate (function
*) { return gate_pure_const (); }
2056 virtual unsigned int execute (function
*);
2058 }; // class pass_local_pure_const
} // anon namespace

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}

/* Emit noreturn warnings.  */
namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}
/* Simple local pass for nothrow discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */

namespace {

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow
unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering, we can not really use availability yet.  */
  if (cgraph_node::get (current_function_decl)->get_availability ()
      <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
	fprintf (dump_file, "Function is interposable;"
		 " not analyzing.\n");
      return true;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	if (stmt_can_throw_external (gsi_stmt (gsi)))
	  {
	    if (is_gimple_call (gsi_stmt (gsi)))
	      {
		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
		if (callee_t && recursive_call_p (current_function_decl,
						  callee_t))
		  continue;
	      }

	    if (dump_file)
	      {
		fprintf (dump_file, "Statement can throw: ");
		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
	      }
	    return 0;
	  }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gimple *g = last_stmt (this_block))
	if (is_gimple_call (g))
	  {
	    tree callee_t = gimple_call_fndecl (g);
	    if (callee_t
		&& recursive_call_p (current_function_decl, callee_t)
		&& maybe_clean_eh_stmt (g)
		&& gimple_purge_dead_eh_edges (this_block))
	      cfg_changed = true;
	  }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
	     current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}