1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2024 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
36 #include "coretypes.h"
41 #include "tree-pass.h"
42 #include "tree-streamer.h"
44 #include "diagnostic.h"
48 #include "gimple-iterator.h"
49 #include "gimple-walk.h"
51 #include "tree-ssa-loop-niter.h"
52 #include "langhooks.h"
53 #include "ipa-utils.h"
54 #include "gimple-pretty-print.h"
56 #include "tree-scalar-evolution.h"
60 #include "alloc-pool.h"
61 #include "symbol-summary.h"
65 #include "ipa-fnsummary.h"
66 #include "symtab-thunks.h"
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  NOTE(review): the enumerator bodies were lost in
   extraction; reconstructed so that "better" states compare lower,
   which the MIN/MAX merges in better_state/worse_state rely on.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Printable names, indexed by pure_const_state_e.  */
static const char *pure_const_names[3] = {"const", "pure", "neither"};

/* Lattice for the malloc-attribute discovery.  TOP is "may be malloc",
   BOTTOM is "known not malloc".  */
enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

/* Printable names, indexed by malloc_state_e.  */
static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
90 /* Holder for the const_state. There is one of these per function
95 funct_state_d (): pure_const_state (IPA_NEITHER
),
96 state_previously_known (IPA_NEITHER
), looping_previously_known (true),
97 looping (true), can_throw (true), can_free (true),
98 malloc_state (STATE_MALLOC_BOTTOM
) {}
100 funct_state_d (const funct_state_d
&s
): pure_const_state (s
.pure_const_state
),
101 state_previously_known (s
.state_previously_known
),
102 looping_previously_known (s
.looping_previously_known
),
103 looping (s
.looping
), can_throw (s
.can_throw
), can_free (s
.can_free
),
104 malloc_state (s
.malloc_state
) {}
107 enum pure_const_state_e pure_const_state
;
108 /* What user set here; we can be always sure about this. */
109 enum pure_const_state_e state_previously_known
;
110 bool looping_previously_known
;
112 /* True if the function could possibly infinite loop. There are a
113 lot of ways that this could be determined. We are pretty
114 conservative here. While it is possible to cse pure and const
115 calls, it is not legal to have dce get rid of the call if there
116 is a possibility that the call could infinite loop since this is
117 a behavioral change. */
122 /* If function can call free, munmap or otherwise make previously
123 non-trapping memory accesses trapping. */
126 enum malloc_state_e malloc_state
;
129 typedef class funct_state_d
* funct_state
;
131 /* The storage of the funct_state is abstracted because there is the
132 possibility that it may be desirable to move this to the cgraph
135 class funct_state_summary_t
:
136 public fast_function_summary
<funct_state_d
*, va_heap
>
139 funct_state_summary_t (symbol_table
*symtab
):
140 fast_function_summary
<funct_state_d
*, va_heap
> (symtab
) {}
142 void insert (cgraph_node
*, funct_state_d
*state
) final override
;
143 void duplicate (cgraph_node
*src_node
, cgraph_node
*dst_node
,
144 funct_state_d
*src_data
,
145 funct_state_d
*dst_data
) final override
;
148 static funct_state_summary_t
*funct_state_summaries
= NULL
;
150 static bool gate_pure_const (void);
154 const pass_data pass_data_ipa_pure_const
=
157 "pure-const", /* name */
158 OPTGROUP_NONE
, /* optinfo_flags */
159 TV_IPA_PURE_CONST
, /* tv_id */
160 0, /* properties_required */
161 0, /* properties_provided */
162 0, /* properties_destroyed */
163 0, /* todo_flags_start */
164 0, /* todo_flags_finish */
167 class pass_ipa_pure_const
: public ipa_opt_pass_d
170 pass_ipa_pure_const(gcc::context
*ctxt
);
172 /* opt_pass methods: */
173 bool gate (function
*) final override
{ return gate_pure_const (); }
174 unsigned int execute (function
*fun
) final override
;
176 void register_hooks (void);
180 }; // class pass_ipa_pure_const
184 /* Try to guess if function body will always be visible to compiler
185 when compiling the call and whether compiler will be able
186 to propagate the information by itself. */
189 function_always_visible_to_compiler_p (tree decl
)
191 return (!TREE_PUBLIC (decl
) || DECL_DECLARED_INLINE_P (decl
)
192 || DECL_COMDAT (decl
));
195 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
196 is true if the function is known to be finite. The diagnostic is
197 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
198 OPTION, this function may initialize it and it is always returned
201 static hash_set
<tree
> *
202 suggest_attribute (int option
, tree decl
, bool known_finite
,
203 hash_set
<tree
> *warned_about
,
204 const char * attrib_name
)
206 if (!option_enabled (option
, lang_hooks
.option_lang_mask (), &global_options
))
208 if (TREE_THIS_VOLATILE (decl
)
209 || (known_finite
&& function_always_visible_to_compiler_p (decl
)))
213 warned_about
= new hash_set
<tree
>;
214 if (warned_about
->contains (decl
))
216 warned_about
->add (decl
);
217 warning_at (DECL_SOURCE_LOCATION (decl
),
220 ? G_("function might be candidate for attribute %qs")
221 : G_("function might be candidate for attribute %qs"
222 " if it is known to return normally"), attrib_name
);
226 /* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
227 is true if the function is known to be finite. */
230 warn_function_pure (tree decl
, bool known_finite
)
232 /* Declaring a void function pure makes no sense and is diagnosed
233 by -Wattributes because calling it would have no effect. */
234 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
237 static hash_set
<tree
> *warned_about
;
239 = suggest_attribute (OPT_Wsuggest_attribute_pure
, decl
,
240 known_finite
, warned_about
, "pure");
243 /* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
244 is true if the function is known to be finite. */
247 warn_function_const (tree decl
, bool known_finite
)
249 /* Declaring a void function const makes no sense is diagnosed
250 by -Wattributes because calling it would have no effect. */
251 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl
))))
254 static hash_set
<tree
> *warned_about
;
256 = suggest_attribute (OPT_Wsuggest_attribute_const
, decl
,
257 known_finite
, warned_about
, "const");
260 /* Emit suggestion about __attribute__((malloc)) for DECL. */
263 warn_function_malloc (tree decl
)
265 static hash_set
<tree
> *warned_about
;
267 = suggest_attribute (OPT_Wsuggest_attribute_malloc
, decl
,
268 true, warned_about
, "malloc");
271 /* Emit suggestion about __attribute__((noreturn)) for DECL. */
274 warn_function_noreturn (tree decl
)
276 tree original_decl
= decl
;
278 static hash_set
<tree
> *warned_about
;
279 if (!lang_hooks
.missing_noreturn_ok_p (decl
)
280 && targetm
.warn_func_return (decl
))
282 = suggest_attribute (OPT_Wsuggest_attribute_noreturn
, original_decl
,
283 true, warned_about
, "noreturn");
287 warn_function_cold (tree decl
)
289 tree original_decl
= decl
;
291 static hash_set
<tree
> *warned_about
;
293 = suggest_attribute (OPT_Wsuggest_attribute_cold
, original_decl
,
294 true, warned_about
, "cold");
298 warn_function_returns_nonnull (tree decl
)
300 static hash_set
<tree
> *warned_about
;
302 = suggest_attribute (OPT_Wsuggest_attribute_returns_nonnull
, decl
,
303 true, warned_about
, "returns_nonnull");
306 /* Check to see if the use (or definition when CHECKING_WRITE is true)
307 variable T is legal in a function that is either pure or const. */
310 check_decl (funct_state local
,
311 tree t
, bool checking_write
, bool ipa
)
313 /* Do not want to do anything with volatile except mark any
314 function that uses one to be not const or pure. */
315 if (TREE_THIS_VOLATILE (t
))
317 local
->pure_const_state
= IPA_NEITHER
;
319 fprintf (dump_file
, " Volatile operand is not const/pure\n");
323 /* Do not care about a local automatic that is not static. */
324 if (!TREE_STATIC (t
) && !DECL_EXTERNAL (t
))
327 /* If the variable has the "used" attribute, treat it as if it had a
328 been touched by the devil. */
329 if (DECL_PRESERVE_P (t
))
331 local
->pure_const_state
= IPA_NEITHER
;
333 fprintf (dump_file
, " Used static/global variable is not const/pure\n");
337 /* In IPA mode we are not interested in checking actual loads and stores;
338 they will be processed at propagation time using ipa_ref. */
342 /* Since we have dealt with the locals and params cases above, if we
343 are CHECKING_WRITE, this cannot be a pure or constant
347 local
->pure_const_state
= IPA_NEITHER
;
349 fprintf (dump_file
, " static/global memory write is not const/pure\n");
353 if (DECL_EXTERNAL (t
) || TREE_PUBLIC (t
))
355 /* Readonly reads are safe. */
356 if (TREE_READONLY (t
))
357 return; /* Read of a constant, do not change the function state. */
361 fprintf (dump_file
, " global memory read is not const\n");
362 /* Just a regular read. */
363 if (local
->pure_const_state
== IPA_CONST
)
364 local
->pure_const_state
= IPA_PURE
;
369 /* Compilation level statics can be read if they are readonly
371 if (TREE_READONLY (t
))
375 fprintf (dump_file
, " static memory read is not const\n");
376 /* Just a regular read. */
377 if (local
->pure_const_state
== IPA_CONST
)
378 local
->pure_const_state
= IPA_PURE
;
383 /* Check to see if the use (or definition when CHECKING_WRITE is true)
384 variable T is legal in a function that is either pure or const. */
387 check_op (funct_state local
, tree t
, bool checking_write
)
389 t
= get_base_address (t
);
390 if (t
&& TREE_THIS_VOLATILE (t
))
392 local
->pure_const_state
= IPA_NEITHER
;
394 fprintf (dump_file
, " Volatile indirect ref is not const/pure\n");
397 else if (refs_local_or_readonly_memory_p (t
))
400 fprintf (dump_file
, " Indirect ref to local or readonly "
404 else if (checking_write
)
406 local
->pure_const_state
= IPA_NEITHER
;
408 fprintf (dump_file
, " Indirect ref write is not const/pure\n");
414 fprintf (dump_file
, " Indirect ref read is not const\n");
415 if (local
->pure_const_state
== IPA_CONST
)
416 local
->pure_const_state
= IPA_PURE
;
420 /* compute state based on ECF FLAGS and store to STATE and LOOPING. */
423 state_from_flags (enum pure_const_state_e
*state
, bool *looping
,
424 int flags
, bool cannot_lead_to_return
)
427 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
430 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
431 fprintf (dump_file
, " looping\n");
433 if (flags
& ECF_CONST
)
436 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
437 fprintf (dump_file
, " const\n");
439 else if (flags
& ECF_PURE
)
442 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
443 fprintf (dump_file
, " pure\n");
445 else if (cannot_lead_to_return
)
449 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
450 fprintf (dump_file
, " ignoring side effects->pure looping\n");
454 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
455 fprintf (dump_file
, " neither\n");
456 *state
= IPA_NEITHER
;
461 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
462 into STATE and LOOPING better of the two variants.
463 Be sure to merge looping correctly. IPA_NEITHER functions
464 have looping 0 even if they don't have to return. */
467 better_state (enum pure_const_state_e
*state
, bool *looping
,
468 enum pure_const_state_e state2
, bool looping2
)
472 if (*state
== IPA_NEITHER
)
475 *looping
= MIN (*looping
, looping2
);
478 else if (state2
!= IPA_NEITHER
)
479 *looping
= MIN (*looping
, looping2
);
482 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
483 into STATE and LOOPING worse of the two variants.
484 N is the actual node called. */
487 worse_state (enum pure_const_state_e
*state
, bool *looping
,
488 enum pure_const_state_e state2
, bool looping2
,
489 struct symtab_node
*from
,
490 struct symtab_node
*to
)
492 /* Consider function:
499 During early optimization we will turn this into:
506 Now if this function will be detected as CONST however when interposed it
507 may end up being just pure. We always must assume the worst scenario here.
509 if (*state
== IPA_CONST
&& state2
== IPA_CONST
510 && to
&& !TREE_READONLY (to
->decl
) && !to
->binds_to_current_def_p (from
))
512 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
513 fprintf (dump_file
, "Dropping state to PURE because call to %s may not "
514 "bind to current def.\n", to
->dump_name ());
517 *state
= MAX (*state
, state2
);
518 *looping
= MAX (*looping
, looping2
);
521 /* Recognize special cases of builtins that are by themselves not const
522 but function using them is. */
524 builtin_safe_for_const_function_p (bool *looping
, tree callee
)
526 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
527 switch (DECL_FUNCTION_CODE (callee
))
529 case BUILT_IN_RETURN
:
530 case BUILT_IN_UNREACHABLE
:
531 CASE_BUILT_IN_ALLOCA
:
532 case BUILT_IN_STACK_SAVE
:
533 case BUILT_IN_STACK_RESTORE
:
534 case BUILT_IN_EH_POINTER
:
535 case BUILT_IN_EH_FILTER
:
536 case BUILT_IN_UNWIND_RESUME
:
537 case BUILT_IN_CXA_END_CLEANUP
:
538 case BUILT_IN_EH_COPY_VALUES
:
539 case BUILT_IN_FRAME_ADDRESS
:
540 case BUILT_IN_APPLY_ARGS
:
541 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT
:
542 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
:
543 case BUILT_IN_DWARF_CFA
:
544 case BUILT_IN_RETURN_ADDRESS
:
547 case BUILT_IN_PREFETCH
:
556 /* Check the parameters of a function call to CALL_EXPR to see if
557 there are any references in the parameters that are not allowed for
558 pure or const functions. Also check to see if this is either an
559 indirect call, a call outside the compilation unit, or has special
560 attributes that may also effect the purity. The CALL_EXPR node for
561 the entire call expression. */
564 check_call (funct_state local
, gcall
*call
, bool ipa
)
566 int flags
= gimple_call_flags (call
);
567 tree callee_t
= gimple_call_fndecl (call
);
568 bool possibly_throws
= stmt_could_throw_p (cfun
, call
);
569 bool possibly_throws_externally
= (possibly_throws
570 && stmt_can_throw_external (cfun
, call
));
575 for (i
= 0; i
< gimple_num_ops (call
); i
++)
576 if (gimple_op (call
, i
)
577 && tree_could_throw_p (gimple_op (call
, i
)))
579 if (possibly_throws
&& cfun
->can_throw_non_call_exceptions
)
582 fprintf (dump_file
, " operand can throw; looping\n");
583 local
->looping
= true;
585 if (possibly_throws_externally
)
588 fprintf (dump_file
, " operand can throw externally\n");
589 local
->can_throw
= true;
594 /* The const and pure flags are set by a variety of places in the
595 compiler (including here). If someone has already set the flags
596 for the callee, (such as for some of the builtins) we will use
597 them, otherwise we will compute our own information.
599 Const and pure functions have less clobber effects than other
600 functions so we process these first. Otherwise if it is a call
601 outside the compilation unit or an indirect call we punt. This
602 leaves local calls which will be processed by following the call
608 if (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
)
609 && !nonfreeing_call_p (call
))
610 local
->can_free
= true;
612 if (builtin_safe_for_const_function_p (&call_looping
, callee_t
))
614 worse_state (&local
->pure_const_state
, &local
->looping
,
615 IPA_CONST
, call_looping
,
619 /* When bad things happen to bad functions, they cannot be const
621 if (setjmp_call_p (callee_t
))
624 fprintf (dump_file
, " setjmp is not const/pure\n");
625 local
->looping
= true;
626 local
->pure_const_state
= IPA_NEITHER
;
629 if (DECL_BUILT_IN_CLASS (callee_t
) == BUILT_IN_NORMAL
)
630 switch (DECL_FUNCTION_CODE (callee_t
))
632 case BUILT_IN_LONGJMP
:
633 case BUILT_IN_NONLOCAL_GOTO
:
636 " longjmp and nonlocal goto is not const/pure\n");
637 local
->pure_const_state
= IPA_NEITHER
;
638 local
->looping
= true;
644 else if (gimple_call_internal_p (call
) && !nonfreeing_call_p (call
))
645 local
->can_free
= true;
647 /* When not in IPA mode, we can still handle self recursion. */
649 && recursive_call_p (current_function_decl
, callee_t
))
652 fprintf (dump_file
, " Recursive call can loop.\n");
653 local
->looping
= true;
655 /* Either callee is unknown or we are doing local analysis.
656 Look to see if there are any bits available for the callee (such as by
657 declaration or because it is builtin) and process solely on the basis of
658 those bits. Handle internal calls always, those calls don't have
659 corresponding cgraph edges and thus aren't processed during
661 else if (!ipa
|| gimple_call_internal_p (call
))
663 enum pure_const_state_e call_state
;
665 if (possibly_throws
&& cfun
->can_throw_non_call_exceptions
)
668 fprintf (dump_file
, " can throw; looping\n");
669 local
->looping
= true;
671 if (possibly_throws_externally
)
675 fprintf (dump_file
, " can throw externally to lp %i\n",
676 lookup_stmt_eh_lp (call
));
678 fprintf (dump_file
, " callee:%s\n",
679 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t
)));
681 local
->can_throw
= true;
683 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
684 fprintf (dump_file
, " checking flags for call:");
685 state_from_flags (&call_state
, &call_looping
, flags
,
686 ((flags
& (ECF_NORETURN
| ECF_NOTHROW
))
687 == (ECF_NORETURN
| ECF_NOTHROW
))
688 || (!flag_exceptions
&& (flags
& ECF_NORETURN
)));
689 worse_state (&local
->pure_const_state
, &local
->looping
,
690 call_state
, call_looping
, NULL
, NULL
);
692 /* Direct functions calls are handled by IPA propagation. */
695 /* Wrapper around check_decl for loads in local more. */
698 check_load (gimple
*, tree op
, tree
, void *data
)
701 check_decl ((funct_state
)data
, op
, false, false);
703 check_op ((funct_state
)data
, op
, false);
707 /* Wrapper around check_decl for stores in local more. */
710 check_store (gimple
*, tree op
, tree
, void *data
)
713 check_decl ((funct_state
)data
, op
, true, false);
715 check_op ((funct_state
)data
, op
, true);
719 /* Wrapper around check_decl for loads in ipa mode. */
722 check_ipa_load (gimple
*, tree op
, tree
, void *data
)
725 check_decl ((funct_state
)data
, op
, false, true);
727 check_op ((funct_state
)data
, op
, false);
731 /* Wrapper around check_decl for stores in ipa mode. */
734 check_ipa_store (gimple
*, tree op
, tree
, void *data
)
737 check_decl ((funct_state
)data
, op
, true, true);
739 check_op ((funct_state
)data
, op
, true);
743 /* Look into pointer pointed to by GSIP and figure out what interesting side
746 check_stmt (gimple_stmt_iterator
*gsip
, funct_state local
, bool ipa
)
748 gimple
*stmt
= gsi_stmt (*gsip
);
750 if (is_gimple_debug (stmt
))
753 /* Do consider clobber as side effects before IPA, so we rather inline
754 C++ destructors and keep clobber semantics than eliminate them.
756 Similar logic is in ipa-modref.
758 TODO: We may get smarter during early optimizations on these and let
759 functions containing only clobbers to be optimized more. This is a common
760 case of C++ destructors. */
762 if ((ipa
|| cfun
->after_inlining
) && gimple_clobber_p (stmt
))
767 fprintf (dump_file
, " scanning: ");
768 print_gimple_stmt (dump_file
, stmt
, 0);
771 if (gimple_has_volatile_ops (stmt
)
772 && !gimple_clobber_p (stmt
))
774 local
->pure_const_state
= IPA_NEITHER
;
776 fprintf (dump_file
, " Volatile stmt is not const/pure\n");
779 /* Look for loads and stores. */
780 walk_stmt_load_store_ops (stmt
, local
,
781 ipa
? check_ipa_load
: check_load
,
782 ipa
? check_ipa_store
: check_store
);
784 if (gimple_code (stmt
) != GIMPLE_CALL
785 && stmt_could_throw_p (cfun
, stmt
))
787 if (cfun
->can_throw_non_call_exceptions
)
790 fprintf (dump_file
, " can throw; looping\n");
791 local
->looping
= true;
793 if (stmt_can_throw_external (cfun
, stmt
))
796 fprintf (dump_file
, " can throw externally\n");
797 local
->can_throw
= true;
801 fprintf (dump_file
, " can throw\n");
803 switch (gimple_code (stmt
))
806 check_call (local
, as_a
<gcall
*> (stmt
), ipa
);
809 if (DECL_NONLOCAL (gimple_label_label (as_a
<glabel
*> (stmt
))))
810 /* Target of long jump. */
813 fprintf (dump_file
, " nonlocal label is not const/pure\n");
814 local
->pure_const_state
= IPA_NEITHER
;
818 if (gimple_asm_clobbers_memory_p (as_a
<gasm
*> (stmt
)))
821 fprintf (dump_file
, " memory asm clobber is not const/pure\n");
822 /* Abandon all hope, ye who enter here. */
823 local
->pure_const_state
= IPA_NEITHER
;
824 local
->can_free
= true;
826 if (gimple_asm_volatile_p (as_a
<gasm
*> (stmt
)))
829 fprintf (dump_file
, " volatile is not const/pure\n");
830 /* Abandon all hope, ye who enter here. */
831 local
->pure_const_state
= IPA_NEITHER
;
832 local
->looping
= true;
833 local
->can_free
= true;
841 /* Check that RETVAL is used only in STMT and in comparisons against 0.
842 RETVAL is return value of the function and STMT is return stmt. */
845 check_retval_uses (tree retval
, gimple
*stmt
)
847 imm_use_iterator use_iter
;
850 FOR_EACH_IMM_USE_STMT (use_stmt
, use_iter
, retval
)
851 if (gcond
*cond
= dyn_cast
<gcond
*> (use_stmt
))
853 tree op2
= gimple_cond_rhs (cond
);
854 if (!integer_zerop (op2
))
857 else if (gassign
*ga
= dyn_cast
<gassign
*> (use_stmt
))
859 enum tree_code code
= gimple_assign_rhs_code (ga
);
860 if (TREE_CODE_CLASS (code
) != tcc_comparison
)
862 if (!integer_zerop (gimple_assign_rhs2 (ga
)))
865 else if (is_gimple_debug (use_stmt
))
867 else if (use_stmt
!= stmt
)
873 /* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
874 attribute. Currently this function does a very conservative analysis.
875 FUN is considered to be a candidate if
876 1) It returns a value of pointer type.
877 2) SSA_NAME_DEF_STMT (return_value) is either a function call or
878 a phi, and element of phi is either NULL or
879 SSA_NAME_DEF_STMT(element) is function call.
880 3) The return-value has immediate uses only within comparisons (gcond or gassign)
881 and return_stmt (and likewise a phi arg has immediate use only within comparison
884 #define DUMP_AND_RETURN(reason) \
886 if (dump_file && (dump_flags & TDF_DETAILS)) \
887 fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
888 (node->dump_name ()), (reason)); \
893 malloc_candidate_p_1 (function
*fun
, tree retval
, gimple
*ret_stmt
, bool ipa
,
896 cgraph_node
*node
= cgraph_node::get_create (fun
->decl
);
897 if (!bitmap_set_bit (visited
, SSA_NAME_VERSION (retval
)))
900 if (!check_retval_uses (retval
, ret_stmt
))
901 DUMP_AND_RETURN("Return value has uses outside return stmt"
902 " and comparisons against 0.")
904 gimple
*def
= SSA_NAME_DEF_STMT (retval
);
906 if (gcall
*call_stmt
= dyn_cast
<gcall
*> (def
))
908 tree callee_decl
= gimple_call_fndecl (call_stmt
);
912 if (!ipa
&& !DECL_IS_MALLOC (callee_decl
))
913 DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
916 cgraph_edge
*cs
= node
->get_edge (call_stmt
);
919 ipa_call_summary
*es
= ipa_call_summaries
->get_create (cs
);
920 es
->is_return_callee_uncaptured
= true;
924 else if (gphi
*phi
= dyn_cast
<gphi
*> (def
))
926 bool all_args_zero
= true;
927 for (unsigned i
= 0; i
< gimple_phi_num_args (phi
); ++i
)
929 tree arg
= gimple_phi_arg_def (phi
, i
);
930 if (integer_zerop (arg
))
933 all_args_zero
= false;
934 if (TREE_CODE (arg
) != SSA_NAME
)
935 DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
936 if (!check_retval_uses (arg
, phi
))
937 DUMP_AND_RETURN ("phi arg has uses outside phi"
938 " and comparisons against 0.")
940 gimple
*arg_def
= SSA_NAME_DEF_STMT (arg
);
941 if (is_a
<gphi
*> (arg_def
))
943 if (!malloc_candidate_p_1 (fun
, arg
, phi
, ipa
, visited
))
944 DUMP_AND_RETURN ("nested phi fail")
948 gcall
*call_stmt
= dyn_cast
<gcall
*> (arg_def
);
950 DUMP_AND_RETURN ("phi arg is a not a call_stmt.")
952 tree callee_decl
= gimple_call_fndecl (call_stmt
);
955 if (!ipa
&& !DECL_IS_MALLOC (callee_decl
))
956 DUMP_AND_RETURN("callee_decl does not have malloc attribute"
957 " for non-ipa mode.")
959 cgraph_edge
*cs
= node
->get_edge (call_stmt
);
962 ipa_call_summary
*es
= ipa_call_summaries
->get_create (cs
);
963 es
->is_return_callee_uncaptured
= true;
968 DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
972 DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
978 malloc_candidate_p (function
*fun
, bool ipa
)
980 basic_block exit_block
= EXIT_BLOCK_PTR_FOR_FN (fun
);
983 cgraph_node
*node
= cgraph_node::get_create (fun
->decl
);
985 if (EDGE_COUNT (exit_block
->preds
) == 0
986 || !flag_delete_null_pointer_checks
)
990 FOR_EACH_EDGE (e
, ei
, exit_block
->preds
)
992 gimple_stmt_iterator gsi
= gsi_last_bb (e
->src
);
993 greturn
*ret_stmt
= dyn_cast
<greturn
*> (gsi_stmt (gsi
));
998 tree retval
= gimple_return_retval (ret_stmt
);
1000 DUMP_AND_RETURN("No return value.")
1002 if (TREE_CODE (retval
) != SSA_NAME
1003 || TREE_CODE (TREE_TYPE (retval
)) != POINTER_TYPE
)
1004 DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
1006 if (!malloc_candidate_p_1 (fun
, retval
, ret_stmt
, ipa
, visited
))
1010 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1011 fprintf (dump_file
, "\nFound %s to be candidate for malloc attribute\n",
1012 IDENTIFIER_POINTER (DECL_NAME (fun
->decl
)));
1016 #undef DUMP_AND_RETURN
1018 /* Return true if function is known to be finite. */
1021 finite_function_p ()
1023 /* Const functions cannot have back edges (an
1024 indication of possible infinite loop side
1027 if (mark_dfs_back_edges ())
1029 /* Preheaders are needed for SCEV to work.
1030 Simple latches and recorded exits improve chances that loop will
1031 proved to be finite in testcases such as in loop-15.c
1033 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1034 | LOOPS_HAVE_SIMPLE_LATCHES
1035 | LOOPS_HAVE_RECORDED_EXITS
);
1036 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1037 flow_loops_dump (dump_file
, NULL
, 0);
1038 if (mark_irreducible_loops ())
1041 fprintf (dump_file
, " has irreducible loops\n");
1047 for (auto loop
: loops_list (cfun
, 0))
1048 if (!finite_loop_p (loop
))
1051 fprintf (dump_file
, " cannot prove finiteness of "
1052 "loop %i\n", loop
->num
);
1058 loop_optimizer_finalize ();
1063 /* This is the main routine for finding the reference patterns for
1064 global variables within a function FN. */
1067 analyze_function (struct cgraph_node
*fn
, bool ipa
)
1069 tree decl
= fn
->decl
;
1071 basic_block this_block
;
1073 l
= XCNEW (class funct_state_d
);
1074 l
->pure_const_state
= IPA_CONST
;
1075 l
->state_previously_known
= IPA_NEITHER
;
1076 l
->looping_previously_known
= true;
1078 l
->can_throw
= false;
1079 l
->can_free
= false;
1080 state_from_flags (&l
->state_previously_known
, &l
->looping_previously_known
,
1081 flags_from_decl_or_type (fn
->decl
),
1082 fn
->cannot_return_p ());
1084 if (fn
->thunk
|| fn
->alias
)
1086 /* Thunk gets propagated through, so nothing interesting happens. */
1088 if (fn
->thunk
&& thunk_info::get (fn
)->virtual_offset_p
)
1089 l
->pure_const_state
= IPA_NEITHER
;
1095 fprintf (dump_file
, "\n\n local analysis of %s\n ",
1099 push_cfun (DECL_STRUCT_FUNCTION (decl
));
1101 FOR_EACH_BB_FN (this_block
, cfun
)
1103 gimple_stmt_iterator gsi
;
1104 struct walk_stmt_info wi
;
1106 memset (&wi
, 0, sizeof (wi
));
1107 for (gsi
= gsi_start_bb (this_block
);
1111 /* NULL memory accesses terminates BB. These accesses are known
1112 to trip undefined behaviour. gimple-ssa-isolate-paths turns them
1113 to volatile accesses and adds builtin_trap call which would
1114 confuse us otherwise. */
1115 if (infer_nonnull_range_by_dereference (gsi_stmt (gsi
),
1119 fprintf (dump_file
, " NULL memory access; terminating BB%s\n",
1120 flag_non_call_exceptions
? "; looping" : "");
1121 if (flag_non_call_exceptions
)
1124 if (stmt_can_throw_external (cfun
, gsi_stmt (gsi
)))
1127 fprintf (dump_file
, " can throw externally\n");
1128 l
->can_throw
= true;
1133 check_stmt (&gsi
, l
, ipa
);
1134 if (l
->pure_const_state
== IPA_NEITHER
1143 if (l
->pure_const_state
!= IPA_NEITHER
1145 && !finite_function_p ())
1148 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1149 fprintf (dump_file
, " checking previously known:");
1151 better_state (&l
->pure_const_state
, &l
->looping
,
1152 l
->state_previously_known
,
1153 l
->looping_previously_known
);
1154 if (TREE_NOTHROW (decl
))
1155 l
->can_throw
= false;
1157 l
->malloc_state
= STATE_MALLOC_BOTTOM
;
1158 if (DECL_IS_MALLOC (decl
))
1159 l
->malloc_state
= STATE_MALLOC
;
1160 else if (ipa
&& malloc_candidate_p (DECL_STRUCT_FUNCTION (decl
), true))
1161 l
->malloc_state
= STATE_MALLOC_TOP
;
1162 else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl
), false))
1163 l
->malloc_state
= STATE_MALLOC
;
1169 fprintf (dump_file
, "Function is locally looping.\n");
1171 fprintf (dump_file
, "Function is locally throwing.\n");
1172 if (l
->pure_const_state
== IPA_CONST
)
1173 fprintf (dump_file
, "Function is locally const.\n");
1174 if (l
->pure_const_state
== IPA_PURE
)
1175 fprintf (dump_file
, "Function is locally pure.\n");
1177 fprintf (dump_file
, "Function can locally free.\n");
1178 if (l
->malloc_state
== STATE_MALLOC
)
1179 fprintf (dump_file
, "Function is locally malloc.\n");
1185 funct_state_summary_t::insert (cgraph_node
*node
, funct_state_d
*state
)
1187 /* There are some shared nodes, in particular the initializers on
1188 static declarations. We do not need to scan them more than once
1189 since all we would be interested in are the addressof
1191 if (opt_for_fn (node
->decl
, flag_ipa_pure_const
))
1193 funct_state_d
*a
= analyze_function (node
, true);
1194 new (state
) funct_state_d (*a
);
1198 /* Do not keep stale summaries. */
1199 funct_state_summaries
->remove (node
);
1202 /* Called when new clone is inserted to callgraph late. */
1205 funct_state_summary_t::duplicate (cgraph_node
*, cgraph_node
*dst
,
1206 funct_state_d
*src_data
,
1207 funct_state_d
*dst_data
)
1209 new (dst_data
) funct_state_d (*src_data
);
1210 if (dst_data
->malloc_state
== STATE_MALLOC
1211 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst
->decl
))))
1212 dst_data
->malloc_state
= STATE_MALLOC_BOTTOM
;
1217 pass_ipa_pure_const::
1218 register_hooks (void)
1225 funct_state_summaries
= new funct_state_summary_t (symtab
);
1229 /* Analyze each function in the cgraph to see if it is locally PURE or
1233 pure_const_generate_summary (void)
1235 struct cgraph_node
*node
;
1237 pass_ipa_pure_const
*pass
= static_cast <pass_ipa_pure_const
*> (current_pass
);
1238 pass
->register_hooks ();
1240 /* Process all of the functions.
1242 We process AVAIL_INTERPOSABLE functions. We cannot use the results
1243 by default, but the info can be used at LTO with -fwhole-program or
1244 when function got cloned and the clone is AVAILABLE. */
1246 FOR_EACH_DEFINED_FUNCTION (node
)
1247 if (opt_for_fn (node
->decl
, flag_ipa_pure_const
))
1249 funct_state_d
*a
= analyze_function (node
, true);
1250 new (funct_state_summaries
->get_create (node
)) funct_state_d (*a
);
1256 /* Serialize the ipa info for lto. */
1259 pure_const_write_summary (void)
1261 struct cgraph_node
*node
;
1262 struct lto_simple_output_block
*ob
1263 = lto_create_simple_output_block (LTO_section_ipa_pure_const
);
1264 unsigned int count
= 0;
1265 lto_symtab_encoder_iterator lsei
;
1266 lto_symtab_encoder_t encoder
;
1268 encoder
= lto_get_out_decl_state ()->symtab_node_encoder
;
1270 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
1271 lsei_next_function_in_partition (&lsei
))
1273 node
= lsei_cgraph_node (lsei
);
1274 if (node
->definition
&& funct_state_summaries
->exists (node
))
1278 streamer_write_uhwi_stream (ob
->main_stream
, count
);
1280 /* Process all of the functions. */
1281 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
1282 lsei_next_function_in_partition (&lsei
))
1284 node
= lsei_cgraph_node (lsei
);
1285 funct_state_d
*fs
= funct_state_summaries
->get (node
);
1286 if (node
->definition
&& fs
!= NULL
)
1288 struct bitpack_d bp
;
1290 lto_symtab_encoder_t encoder
;
1292 encoder
= ob
->decl_state
->symtab_node_encoder
;
1293 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
1294 streamer_write_uhwi_stream (ob
->main_stream
, node_ref
);
1296 /* Note that flags will need to be read in the opposite
1297 order as we are pushing the bitflags into FLAGS. */
1298 bp
= bitpack_create (ob
->main_stream
);
1299 bp_pack_value (&bp
, fs
->pure_const_state
, 2);
1300 bp_pack_value (&bp
, fs
->state_previously_known
, 2);
1301 bp_pack_value (&bp
, fs
->looping_previously_known
, 1);
1302 bp_pack_value (&bp
, fs
->looping
, 1);
1303 bp_pack_value (&bp
, fs
->can_throw
, 1);
1304 bp_pack_value (&bp
, fs
->can_free
, 1);
1305 bp_pack_value (&bp
, fs
->malloc_state
, 2);
1306 streamer_write_bitpack (&bp
);
1310 lto_destroy_simple_output_block (ob
);
1314 /* Deserialize the ipa info for lto. */
1317 pure_const_read_summary (void)
1319 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
1320 struct lto_file_decl_data
*file_data
;
1323 pass_ipa_pure_const
*pass
= static_cast <pass_ipa_pure_const
*> (current_pass
);
1324 pass
->register_hooks ();
1326 while ((file_data
= file_data_vec
[j
++]))
1330 class lto_input_block
*ib
1331 = lto_create_simple_input_block (file_data
,
1332 LTO_section_ipa_pure_const
,
1337 unsigned int count
= streamer_read_uhwi (ib
);
1339 for (i
= 0; i
< count
; i
++)
1342 struct cgraph_node
*node
;
1343 struct bitpack_d bp
;
1345 lto_symtab_encoder_t encoder
;
1347 index
= streamer_read_uhwi (ib
);
1348 encoder
= file_data
->symtab_node_encoder
;
1349 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
1352 fs
= funct_state_summaries
->get_create (node
);
1353 /* Note that the flags must be read in the opposite
1354 order in which they were written (the bitflags were
1355 pushed into FLAGS). */
1356 bp
= streamer_read_bitpack (ib
);
1357 fs
->pure_const_state
1358 = (enum pure_const_state_e
) bp_unpack_value (&bp
, 2);
1359 fs
->state_previously_known
1360 = (enum pure_const_state_e
) bp_unpack_value (&bp
, 2);
1361 fs
->looping_previously_known
= bp_unpack_value (&bp
, 1);
1362 fs
->looping
= bp_unpack_value (&bp
, 1);
1363 fs
->can_throw
= bp_unpack_value (&bp
, 1);
1364 fs
->can_free
= bp_unpack_value (&bp
, 1);
1366 = (enum malloc_state_e
) bp_unpack_value (&bp
, 2);
1370 int flags
= flags_from_decl_or_type (node
->decl
);
1371 fprintf (dump_file
, "Read info for %s ", node
->dump_name ());
1372 if (flags
& ECF_CONST
)
1373 fprintf (dump_file
, " const");
1374 if (flags
& ECF_PURE
)
1375 fprintf (dump_file
, " pure");
1376 if (flags
& ECF_NOTHROW
)
1377 fprintf (dump_file
, " nothrow");
1378 fprintf (dump_file
, "\n pure const state: %s\n",
1379 pure_const_names
[fs
->pure_const_state
]);
1380 fprintf (dump_file
, " previously known state: %s\n",
1381 pure_const_names
[fs
->state_previously_known
]);
1383 fprintf (dump_file
," function is locally looping\n");
1384 if (fs
->looping_previously_known
)
1385 fprintf (dump_file
," function is previously known looping\n");
1387 fprintf (dump_file
," function is locally throwing\n");
1389 fprintf (dump_file
," function can locally free\n");
1390 fprintf (dump_file
, "\n malloc state: %s\n",
1391 malloc_state_names
[fs
->malloc_state
]);
1395 lto_destroy_simple_input_block (file_data
,
1396 LTO_section_ipa_pure_const
,
1402 /* We only propagate across edges that can throw externally and their callee
1403 is not interposable. */
1406 ignore_edge_for_nothrow (struct cgraph_edge
*e
)
1408 if (!e
->can_throw_external
|| TREE_NOTHROW (e
->callee
->decl
))
1411 enum availability avail
;
1412 cgraph_node
*ultimate_target
1413 = e
->callee
->function_or_virtual_thunk_symbol (&avail
, e
->caller
);
1414 if (avail
<= AVAIL_INTERPOSABLE
|| TREE_NOTHROW (ultimate_target
->decl
))
1416 return ((opt_for_fn (e
->callee
->decl
, flag_non_call_exceptions
)
1417 && !e
->callee
->binds_to_current_def_p (e
->caller
))
1418 || !opt_for_fn (e
->caller
->decl
, flag_ipa_pure_const
)
1419 || !opt_for_fn (ultimate_target
->decl
, flag_ipa_pure_const
));
1422 /* Return true if NODE is self recursive function.
1423 Indirectly recursive functions appears as non-trivial strongly
1424 connected components, so we need to care about self recursion
1428 self_recursive_p (struct cgraph_node
*node
)
1430 struct cgraph_edge
*e
;
1431 for (e
= node
->callees
; e
; e
= e
->next_callee
)
1432 if (e
->callee
->function_symbol () == node
)
1437 /* Return true if N is cdtor that is not const or pure. In this case we may
1438 need to remove unreachable function if it is marked const/pure. */
1441 cdtor_p (cgraph_node
*n
, void *)
1443 if (DECL_STATIC_CONSTRUCTOR (n
->decl
) || DECL_STATIC_DESTRUCTOR (n
->decl
))
1444 return ((!TREE_READONLY (n
->decl
) && !DECL_PURE_P (n
->decl
))
1445 || DECL_LOOPING_CONST_OR_PURE_P (n
->decl
));
1449 /* Skip edges from and to nodes without ipa_pure_const enabled.
1450 Ignore not available symbols. */
1453 ignore_edge_for_pure_const (struct cgraph_edge
*e
)
1455 enum availability avail
;
1456 cgraph_node
*ultimate_target
1457 = e
->callee
->function_or_virtual_thunk_symbol (&avail
, e
->caller
);
1459 return (avail
<= AVAIL_INTERPOSABLE
1460 || !opt_for_fn (e
->caller
->decl
, flag_ipa_pure_const
)
1461 || !opt_for_fn (ultimate_target
->decl
,
1462 flag_ipa_pure_const
));
1465 /* Return true if function should be skipped for local pure const analysis. */
1468 skip_function_for_local_pure_const (struct cgraph_node
*node
)
1470 /* Because we do not schedule pass_fixup_cfg over whole program after early
1471 optimizations we must not promote functions that are called by already
1472 processed functions. */
1474 if (function_called_by_processed_nodes_p ())
1477 fprintf (dump_file
, "Function called in recursive cycle; ignoring\n");
1480 /* Save some work and do not analyze functions which are interposable and
1481 do not have any non-interposable aliases. */
1482 if (node
->get_availability () <= AVAIL_INTERPOSABLE
1484 && !node
->has_aliases_p ())
1488 "Function is interposable; not analyzing.\n");
1494 /* Make function const and output warning. If LOCAL is true,
1495 return true if anything changed. Otherwise return true if
1496 we may have introduced removale ctors. */
1499 ipa_make_function_const (struct cgraph_node
*node
, bool looping
, bool local
)
1503 if (TREE_READONLY (node
->decl
)
1504 && (looping
|| !DECL_LOOPING_CONST_OR_PURE_P (node
->decl
)))
1506 warn_function_const (node
->decl
, !looping
);
1507 if (local
&& skip_function_for_local_pure_const (node
))
1510 fprintf (dump_file
, "Function found to be %sconst: %s\n",
1511 looping
? "looping " : "",
1512 node
->dump_name ());
1513 if (!local
&& !looping
)
1514 cdtor
= node
->call_for_symbol_and_aliases (cdtor_p
, NULL
, true);
1515 if (!dbg_cnt (ipa_attr
))
1517 if (node
->set_const_flag (true, looping
))
1521 "Declaration updated to be %sconst: %s\n",
1522 looping
? "looping " : "",
1523 node
->dump_name ());
1531 /* Make function const and output warning. If LOCAL is true,
1532 return true if anything changed. Otherwise return true if
1533 we may have introduced removale ctors. */
1536 ipa_make_function_pure (struct cgraph_node
*node
, bool looping
, bool local
)
1540 if (TREE_READONLY (node
->decl
)
1541 || (DECL_PURE_P (node
->decl
)
1542 && (looping
|| !DECL_LOOPING_CONST_OR_PURE_P (node
->decl
))))
1544 warn_function_pure (node
->decl
, !looping
);
1545 if (local
&& skip_function_for_local_pure_const (node
))
1548 fprintf (dump_file
, "Function found to be %spure: %s\n",
1549 looping
? "looping " : "",
1550 node
->dump_name ());
1551 if (!local
&& !looping
)
1552 cdtor
= node
->call_for_symbol_and_aliases (cdtor_p
, NULL
, true);
1553 if (!dbg_cnt (ipa_attr
))
1555 if (node
->set_pure_flag (true, looping
))
1559 "Declaration updated to be %spure: %s\n",
1560 looping
? "looping " : "",
1561 node
->dump_name ());
1569 /* Produce transitive closure over the callgraph and compute pure/const
1573 propagate_pure_const (void)
1575 struct cgraph_node
*node
;
1576 struct cgraph_node
*w
;
1577 struct cgraph_node
**order
=
1578 XCNEWVEC (struct cgraph_node
*, symtab
->cgraph_count
);
1581 struct ipa_dfs_info
* w_info
;
1582 bool remove_p
= false;
1584 order_pos
= ipa_reduced_postorder (order
, true,
1585 ignore_edge_for_pure_const
);
1588 cgraph_node::dump_cgraph (dump_file
);
1589 ipa_print_order (dump_file
, "reduced", order
, order_pos
);
1592 /* Propagate the local information through the call graph to produce
1593 the global information. All the nodes within a cycle will have
1594 the same info so we collapse cycles first. Then we can do the
1595 propagation in one pass from the leaves to the roots. */
1596 for (i
= 0; i
< order_pos
; i
++ )
1598 enum pure_const_state_e pure_const_state
= IPA_CONST
;
1599 bool looping
= false;
1606 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1607 fprintf (dump_file
, "Starting cycle\n");
1609 /* Find the worst state for any node in the cycle. */
1611 while (w
&& pure_const_state
!= IPA_NEITHER
)
1613 struct cgraph_edge
*e
;
1614 struct cgraph_edge
*ie
;
1616 struct ipa_ref
*ref
= NULL
;
1618 funct_state w_l
= funct_state_summaries
->get_create (w
);
1619 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1620 fprintf (dump_file
, " Visiting %s state:%s looping %i\n",
1622 pure_const_names
[w_l
->pure_const_state
],
1625 /* First merge in function body properties.
1626 We are safe to pass NULL as FROM and TO because we will take care
1627 of possible interposition when walking callees. */
1628 worse_state (&pure_const_state
, &looping
,
1629 w_l
->pure_const_state
, w_l
->looping
,
1631 if (pure_const_state
== IPA_NEITHER
)
1636 /* We consider recursive cycles as possibly infinite.
1637 This might be relaxed since infinite recursion leads to stack
1642 /* Now walk the edges and merge in callee properties. */
1643 for (e
= w
->callees
; e
&& pure_const_state
!= IPA_NEITHER
;
1646 enum availability avail
;
1647 struct cgraph_node
*y
= e
->callee
->
1648 function_or_virtual_thunk_symbol (&avail
,
1650 enum pure_const_state_e edge_state
= IPA_CONST
;
1651 bool edge_looping
= false;
1653 if (e
->recursive_p ())
1656 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1658 fprintf (dump_file
, " Call to %s",
1659 e
->callee
->dump_name ());
1661 if (avail
> AVAIL_INTERPOSABLE
)
1663 funct_state y_l
= funct_state_summaries
->get_create (y
);
1665 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1668 " state:%s looping:%i\n",
1669 pure_const_names
[y_l
->pure_const_state
],
1672 if (y_l
->pure_const_state
> IPA_PURE
1673 && e
->cannot_lead_to_return_p ())
1675 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1677 " Ignoring side effects"
1678 " -> pure, looping\n");
1679 edge_state
= IPA_PURE
;
1680 edge_looping
= true;
1684 edge_state
= y_l
->pure_const_state
;
1685 edge_looping
= y_l
->looping
;
1688 else if (builtin_safe_for_const_function_p (&edge_looping
,
1690 edge_state
= IPA_CONST
;
1692 state_from_flags (&edge_state
, &edge_looping
,
1693 flags_from_decl_or_type (y
->decl
),
1694 e
->cannot_lead_to_return_p ());
1696 /* Merge the results with what we already know. */
1697 better_state (&edge_state
, &edge_looping
,
1698 w_l
->state_previously_known
,
1699 w_l
->looping_previously_known
);
1700 worse_state (&pure_const_state
, &looping
,
1701 edge_state
, edge_looping
, e
->caller
, e
->callee
);
1702 if (pure_const_state
== IPA_NEITHER
)
1706 /* Now process the indirect call. */
1707 for (ie
= w
->indirect_calls
;
1708 ie
&& pure_const_state
!= IPA_NEITHER
; ie
= ie
->next_callee
)
1710 enum pure_const_state_e edge_state
= IPA_CONST
;
1711 bool edge_looping
= false;
1713 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1714 fprintf (dump_file
, " Indirect call");
1715 state_from_flags (&edge_state
, &edge_looping
,
1716 ie
->indirect_info
->ecf_flags
,
1717 ie
->cannot_lead_to_return_p ());
1718 /* Merge the results with what we already know. */
1719 better_state (&edge_state
, &edge_looping
,
1720 w_l
->state_previously_known
,
1721 w_l
->looping_previously_known
);
1722 worse_state (&pure_const_state
, &looping
,
1723 edge_state
, edge_looping
, NULL
, NULL
);
1724 if (pure_const_state
== IPA_NEITHER
)
1728 /* And finally all loads and stores. */
1729 for (i
= 0; w
->iterate_reference (i
, ref
)
1730 && pure_const_state
!= IPA_NEITHER
; i
++)
1732 enum pure_const_state_e ref_state
= IPA_CONST
;
1733 bool ref_looping
= false;
1737 /* readonly reads are safe. */
1738 if (TREE_READONLY (ref
->referred
->decl
))
1740 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1741 fprintf (dump_file
, " nonreadonly global var read\n");
1742 ref_state
= IPA_PURE
;
1745 if (ref
->cannot_lead_to_return ())
1747 ref_state
= IPA_NEITHER
;
1748 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1749 fprintf (dump_file
, " global var write\n");
1756 better_state (&ref_state
, &ref_looping
,
1757 w_l
->state_previously_known
,
1758 w_l
->looping_previously_known
);
1759 worse_state (&pure_const_state
, &looping
,
1760 ref_state
, ref_looping
, NULL
, NULL
);
1761 if (pure_const_state
== IPA_NEITHER
)
1764 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1765 w
= w_info
->next_cycle
;
1767 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1768 fprintf (dump_file
, "Result %s looping %i\n",
1769 pure_const_names
[pure_const_state
],
1772 /* Find the worst state of can_free for any node in the cycle. */
1773 bool can_free
= false;
1775 while (w
&& !can_free
)
1777 struct cgraph_edge
*e
;
1778 funct_state w_l
= funct_state_summaries
->get (w
);
1781 || w
->get_availability () == AVAIL_INTERPOSABLE
1782 || w
->indirect_calls
)
1785 for (e
= w
->callees
; e
&& !can_free
; e
= e
->next_callee
)
1787 enum availability avail
;
1788 struct cgraph_node
*y
= e
->callee
->
1789 function_or_virtual_thunk_symbol (&avail
,
1792 if (avail
> AVAIL_INTERPOSABLE
)
1793 can_free
= funct_state_summaries
->get (y
)->can_free
;
1797 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1798 w
= w_info
->next_cycle
;
1801 /* Copy back the region's pure_const_state which is shared by
1802 all nodes in the region. */
1806 funct_state w_l
= funct_state_summaries
->get (w
);
1807 enum pure_const_state_e this_state
= pure_const_state
;
1808 bool this_looping
= looping
;
1810 w_l
->can_free
= can_free
;
1811 w
->nonfreeing_fn
= !can_free
;
1812 if (!can_free
&& dump_file
)
1813 fprintf (dump_file
, "Function found not to call free: %s\n",
1816 if (w_l
->state_previously_known
!= IPA_NEITHER
1817 && this_state
> w_l
->state_previously_known
)
1819 if (this_state
== IPA_NEITHER
)
1820 this_looping
= w_l
->looping_previously_known
;
1821 this_state
= w_l
->state_previously_known
;
1823 if (!this_looping
&& self_recursive_p (w
))
1824 this_looping
= true;
1825 if (!w_l
->looping_previously_known
)
1826 this_looping
= false;
1828 /* All nodes within a cycle share the same info. */
1829 w_l
->pure_const_state
= this_state
;
1830 w_l
->looping
= this_looping
;
1832 /* Inline clones share declaration with their offline copies;
1833 do not modify their declarations since the offline copy may
1839 remove_p
|= ipa_make_function_const (w
, this_looping
, false);
1843 remove_p
|= ipa_make_function_pure (w
, this_looping
, false);
1849 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1850 w
= w_info
->next_cycle
;
1854 ipa_free_postorder_info ();
1859 /* Produce transitive closure over the callgraph and compute nothrow
1863 propagate_nothrow (void)
1865 struct cgraph_node
*node
;
1866 struct cgraph_node
*w
;
1867 struct cgraph_node
**order
=
1868 XCNEWVEC (struct cgraph_node
*, symtab
->cgraph_count
);
1871 struct ipa_dfs_info
* w_info
;
1873 order_pos
= ipa_reduced_postorder (order
, true,
1874 ignore_edge_for_nothrow
);
1877 cgraph_node::dump_cgraph (dump_file
);
1878 ipa_print_order (dump_file
, "reduced for nothrow", order
, order_pos
);
1881 /* Propagate the local information through the call graph to produce
1882 the global information. All the nodes within a cycle will have
1883 the same info so we collapse cycles first. Then we can do the
1884 propagation in one pass from the leaves to the roots. */
1885 for (i
= 0; i
< order_pos
; i
++ )
1887 bool can_throw
= false;
1893 /* Find the worst state for any node in the cycle. */
1895 while (w
&& !can_throw
)
1897 struct cgraph_edge
*e
, *ie
;
1899 if (!TREE_NOTHROW (w
->decl
))
1901 funct_state w_l
= funct_state_summaries
->get_create (w
);
1904 || w
->get_availability () == AVAIL_INTERPOSABLE
)
1907 for (e
= w
->callees
; e
&& !can_throw
; e
= e
->next_callee
)
1909 enum availability avail
;
1911 if (!e
->can_throw_external
|| TREE_NOTHROW (e
->callee
->decl
))
1914 struct cgraph_node
*y
= e
->callee
->
1915 function_or_virtual_thunk_symbol (&avail
,
1918 /* We can use info about the callee only if we know it
1919 cannot be interposed.
1920 When callee is compiled with non-call exceptions we also
1921 must check that the declaration is bound to current
1922 body as other semantically equivalent body may still
1924 if (avail
<= AVAIL_INTERPOSABLE
1925 || (!TREE_NOTHROW (y
->decl
)
1926 && (funct_state_summaries
->get_create (y
)->can_throw
1927 || (opt_for_fn (y
->decl
, flag_non_call_exceptions
)
1928 && !e
->callee
->binds_to_current_def_p (w
)))))
1931 for (ie
= w
->indirect_calls
; ie
&& !can_throw
;
1932 ie
= ie
->next_callee
)
1933 if (ie
->can_throw_external
1934 && !(ie
->indirect_info
->ecf_flags
& ECF_NOTHROW
))
1937 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1938 w
= w_info
->next_cycle
;
1941 /* Copy back the region's pure_const_state which is shared by
1942 all nodes in the region. */
1946 funct_state w_l
= funct_state_summaries
->get_create (w
);
1947 if (!can_throw
&& !TREE_NOTHROW (w
->decl
))
1949 /* Inline clones share declaration with their offline copies;
1950 do not modify their declarations since the offline copy may
1954 w
->set_nothrow_flag (true);
1956 fprintf (dump_file
, "Function found to be nothrow: %s\n",
1960 else if (can_throw
&& !TREE_NOTHROW (w
->decl
))
1961 w_l
->can_throw
= true;
1962 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1963 w
= w_info
->next_cycle
;
1967 ipa_free_postorder_info ();
1971 /* Debugging function to dump state of malloc lattice. */
1975 dump_malloc_lattice (FILE *dump_file
, const char *s
)
1980 fprintf (dump_file
, "\n\nMALLOC LATTICE %s:\n", s
);
1982 FOR_EACH_FUNCTION (node
)
1984 funct_state fs
= funct_state_summaries
->get (node
);
1986 fprintf (dump_file
, "%s: %s\n", node
->dump_name (),
1987 malloc_state_names
[fs
->malloc_state
]);
1991 /* Propagate malloc attribute across the callgraph. */
1994 propagate_malloc (void)
1997 FOR_EACH_FUNCTION (node
)
1999 if (DECL_IS_MALLOC (node
->decl
))
2000 if (!funct_state_summaries
->exists (node
))
2002 funct_state fs
= funct_state_summaries
->get_create (node
);
2003 fs
->malloc_state
= STATE_MALLOC
;
2007 dump_malloc_lattice (dump_file
, "Initial");
2008 struct cgraph_node
**order
2009 = XNEWVEC (struct cgraph_node
*, symtab
->cgraph_count
);
2010 int order_pos
= ipa_reverse_postorder (order
);
2011 bool changed
= true;
2016 /* Walk in postorder. */
2017 for (int i
= order_pos
- 1; i
>= 0; --i
)
2019 cgraph_node
*node
= order
[i
];
2021 || !node
->definition
2022 || !funct_state_summaries
->exists (node
))
2025 funct_state l
= funct_state_summaries
->get (node
);
2027 /* FIXME: add support for indirect-calls. */
2028 if (node
->indirect_calls
)
2030 l
->malloc_state
= STATE_MALLOC_BOTTOM
;
2034 if (node
->get_availability () <= AVAIL_INTERPOSABLE
)
2036 l
->malloc_state
= STATE_MALLOC_BOTTOM
;
2040 if (l
->malloc_state
== STATE_MALLOC_BOTTOM
)
2043 auto_vec
<cgraph_node
*, 16> callees
;
2044 for (cgraph_edge
*cs
= node
->callees
; cs
; cs
= cs
->next_callee
)
2046 ipa_call_summary
*es
= ipa_call_summaries
->get_create (cs
);
2047 if (es
&& es
->is_return_callee_uncaptured
)
2048 callees
.safe_push (cs
->callee
);
2051 malloc_state_e new_state
= l
->malloc_state
;
2052 for (unsigned j
= 0; j
< callees
.length (); j
++)
2054 cgraph_node
*callee
= callees
[j
];
2055 if (!funct_state_summaries
->exists (node
))
2057 new_state
= STATE_MALLOC_BOTTOM
;
2060 malloc_state_e callee_state
2061 = funct_state_summaries
->get_create (callee
)->malloc_state
;
2062 if (new_state
< callee_state
)
2063 new_state
= callee_state
;
2065 if (new_state
!= l
->malloc_state
)
2068 l
->malloc_state
= new_state
;
2073 FOR_EACH_DEFINED_FUNCTION (node
)
2074 if (funct_state_summaries
->exists (node
))
2076 funct_state l
= funct_state_summaries
->get (node
);
2078 && l
->malloc_state
== STATE_MALLOC
2079 && !node
->inlined_to
2080 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node
->decl
))))
2082 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2083 fprintf (dump_file
, "Function %s found to be malloc\n",
2084 node
->dump_name ());
2086 bool malloc_decl_p
= DECL_IS_MALLOC (node
->decl
);
2087 node
->set_malloc_flag (true);
2088 if (!malloc_decl_p
&& warn_suggest_attribute_malloc
)
2089 warn_function_malloc (node
->decl
);
2093 dump_malloc_lattice (dump_file
, "after propagation");
2094 ipa_free_postorder_info ();
2098 /* Produce the global information by preforming a transitive closure
2099 on the local information that was produced by generate_summary. */
2102 pass_ipa_pure_const::
2103 execute (function
*)
2107 /* Nothrow makes more function to not lead to return and improve
2109 propagate_nothrow ();
2110 propagate_malloc ();
2111 remove_p
= propagate_pure_const ();
2113 delete funct_state_summaries
;
2114 return remove_p
? TODO_remove_functions
: 0;
2118 gate_pure_const (void)
2120 return flag_ipa_pure_const
|| in_lto_p
;
2123 pass_ipa_pure_const::pass_ipa_pure_const(gcc::context
*ctxt
)
2124 : ipa_opt_pass_d(pass_data_ipa_pure_const
, ctxt
,
2125 pure_const_generate_summary
, /* generate_summary */
2126 pure_const_write_summary
, /* write_summary */
2127 pure_const_read_summary
, /* read_summary */
2128 NULL
, /* write_optimization_summary */
2129 NULL
, /* read_optimization_summary */
2130 NULL
, /* stmt_fixup */
2131 0, /* function_transform_todo_flags_start */
2132 NULL
, /* function_transform */
2133 NULL
), /* variable_transform */
2137 make_pass_ipa_pure_const (gcc::context
*ctxt
)
2139 return new pass_ipa_pure_const (ctxt
);
2142 /* Simple local pass for pure const discovery reusing the analysis from
2143 ipa_pure_const. This pass is effective when executed together with
2144 other optimization passes in early optimization pass queue. */
2148 const pass_data pass_data_local_pure_const
=
2150 GIMPLE_PASS
, /* type */
2151 "local-pure-const", /* name */
2152 OPTGROUP_NONE
, /* optinfo_flags */
2153 TV_IPA_PURE_CONST
, /* tv_id */
2154 0, /* properties_required */
2155 0, /* properties_provided */
2156 0, /* properties_destroyed */
2157 0, /* todo_flags_start */
2158 0, /* todo_flags_finish */
2161 class pass_local_pure_const
: public gimple_opt_pass
2164 pass_local_pure_const (gcc::context
*ctxt
)
2165 : gimple_opt_pass (pass_data_local_pure_const
, ctxt
)
2168 /* opt_pass methods: */
2169 opt_pass
* clone () final override
2171 return new pass_local_pure_const (m_ctxt
);
2173 bool gate (function
*) final override
{ return gate_pure_const (); }
2174 unsigned int execute (function
*) final override
;
2176 }; // class pass_local_pure_const
2179 pass_local_pure_const::execute (function
*fun
)
2181 bool changed
= false;
2184 struct cgraph_node
*node
;
2186 node
= cgraph_node::get (current_function_decl
);
2187 skip
= skip_function_for_local_pure_const (node
);
2189 if (!warn_suggest_attribute_const
2190 && !warn_suggest_attribute_pure
2194 l
= analyze_function (node
, false);
2196 /* Do NORETURN discovery. */
2197 if (!skip
&& !TREE_THIS_VOLATILE (current_function_decl
)
2198 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) == 0)
2200 warn_function_noreturn (fun
->decl
);
2202 fprintf (dump_file
, "Function found to be noreturn: %s\n",
2203 current_function_name ());
2205 /* Update declaration and reduce profile to executed once. */
2206 if (cgraph_node::get (current_function_decl
)->set_noreturn_flag (true))
2208 if (node
->frequency
> NODE_FREQUENCY_EXECUTED_ONCE
)
2209 node
->frequency
= NODE_FREQUENCY_EXECUTED_ONCE
;
2212 switch (l
->pure_const_state
)
2215 changed
|= ipa_make_function_const
2216 (cgraph_node::get (current_function_decl
), l
->looping
, true);
2220 changed
|= ipa_make_function_pure
2221 (cgraph_node::get (current_function_decl
), l
->looping
, true);
2227 if (!l
->can_throw
&& !TREE_NOTHROW (current_function_decl
))
2229 node
->set_nothrow_flag (true);
2232 fprintf (dump_file
, "Function found to be nothrow: %s\n",
2233 current_function_name ());
2236 if (l
->malloc_state
== STATE_MALLOC
2237 && !DECL_IS_MALLOC (current_function_decl
))
2239 node
->set_malloc_flag (true);
2240 if (warn_suggest_attribute_malloc
)
2241 warn_function_malloc (node
->decl
);
2244 fprintf (dump_file
, "Function found to be malloc: %s\n",
2245 node
->dump_name ());
2250 return execute_fixup_cfg ();
2258 make_pass_local_pure_const (gcc::context
*ctxt
)
2260 return new pass_local_pure_const (ctxt
);
2263 /* Emit noreturn warnings. */
2267 const pass_data pass_data_warn_function_noreturn
=
2269 GIMPLE_PASS
, /* type */
2270 "*warn_function_noreturn", /* name */
2271 OPTGROUP_NONE
, /* optinfo_flags */
2272 TV_NONE
, /* tv_id */
2273 PROP_cfg
, /* properties_required */
2274 0, /* properties_provided */
2275 0, /* properties_destroyed */
2276 0, /* todo_flags_start */
2277 0, /* todo_flags_finish */
2280 class pass_warn_function_noreturn
: public gimple_opt_pass
2283 pass_warn_function_noreturn (gcc::context
*ctxt
)
2284 : gimple_opt_pass (pass_data_warn_function_noreturn
, ctxt
)
2287 /* opt_pass methods: */
2288 bool gate (function
*) final override
2290 return warn_suggest_attribute_noreturn
;
2292 unsigned int execute (function
*fun
) final override
2294 if (!TREE_THIS_VOLATILE (current_function_decl
)
2295 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) == 0)
2296 warn_function_noreturn (current_function_decl
);
2300 }; // class pass_warn_function_noreturn
2305 make_pass_warn_function_noreturn (gcc::context
*ctxt
)
2307 return new pass_warn_function_noreturn (ctxt
);
2310 /* Simple local pass for pure const discovery reusing the analysis from
2311 ipa_pure_const. This pass is effective when executed together with
2312 other optimization passes in early optimization pass queue. */
2316 const pass_data pass_data_nothrow
=
2318 GIMPLE_PASS
, /* type */
2319 "nothrow", /* name */
2320 OPTGROUP_NONE
, /* optinfo_flags */
2321 TV_IPA_PURE_CONST
, /* tv_id */
2322 0, /* properties_required */
2323 0, /* properties_provided */
2324 0, /* properties_destroyed */
2325 0, /* todo_flags_start */
2326 0, /* todo_flags_finish */
2329 class pass_nothrow
: public gimple_opt_pass
2332 pass_nothrow (gcc::context
*ctxt
)
2333 : gimple_opt_pass (pass_data_nothrow
, ctxt
)
2336 /* opt_pass methods: */
2337 opt_pass
* clone () final override
{ return new pass_nothrow (m_ctxt
); }
2338 bool gate (function
*) final override
{ return optimize
; }
2339 unsigned int execute (function
*) final override
;
2341 }; // class pass_nothrow
2344 pass_nothrow::execute (function
*)
2346 struct cgraph_node
*node
;
2347 basic_block this_block
;
2349 if (TREE_NOTHROW (current_function_decl
))
2352 node
= cgraph_node::get (current_function_decl
);
2354 /* We run during lowering, we cannot really use availability yet. */
2355 if (cgraph_node::get (current_function_decl
)->get_availability ()
2356 <= AVAIL_INTERPOSABLE
)
2359 fprintf (dump_file
, "Function is interposable;"
2360 " not analyzing.\n");
2364 FOR_EACH_BB_FN (this_block
, cfun
)
2366 for (gimple_stmt_iterator gsi
= gsi_start_bb (this_block
);
2369 if (stmt_can_throw_external (cfun
, gsi_stmt (gsi
)))
2371 if (is_gimple_call (gsi_stmt (gsi
)))
2373 tree callee_t
= gimple_call_fndecl (gsi_stmt (gsi
));
2374 if (callee_t
&& recursive_call_p (current_function_decl
,
2381 fprintf (dump_file
, "Statement can throw: ");
2382 print_gimple_stmt (dump_file
, gsi_stmt (gsi
), 0);
2388 node
->set_nothrow_flag (true);
2390 bool cfg_changed
= false;
2391 if (self_recursive_p (node
))
2392 FOR_EACH_BB_FN (this_block
, cfun
)
2393 if (gcall
*g
= safe_dyn_cast
<gcall
*> (*gsi_last_bb (this_block
)))
2395 tree callee_t
= gimple_call_fndecl (g
);
2397 && recursive_call_p (current_function_decl
, callee_t
)
2398 && maybe_clean_eh_stmt (g
)
2399 && gimple_purge_dead_eh_edges (this_block
))
2404 fprintf (dump_file
, "Function found to be nothrow: %s\n",
2405 current_function_name ());
2406 return cfg_changed
? TODO_cleanup_cfg
: 0;
2412 make_pass_nothrow (gcc::context
*ctxt
)
2414 return new pass_nothrow (ctxt
);