/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_OR_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not prone
   to this problem since they are by definition transitive.  */
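/* Illustrative example (added for exposition; not part of the original
   sources): a const function depends only on its arguments, a pure
   function may additionally read global memory, and anything that
   writes memory or has other side effects is neither:

     int add (int a, int b) { return a + b; }   // const
     int load (const int *p) { return *p; }     // pure: reads memory
     int bump (int *p) { return ++*p; }         // neither: writes memory
*/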
/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
#include "ssa.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

static const char *pure_const_names[3] = {"const", "pure", "neither"};

enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
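/* Note (added for exposition): both enumerations are ordered lattices in
   which a larger value is a weaker guarantee; this is what lets
   worse_state () below merge states with MAX and better_state () with
   MIN.  */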
/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;

  enum malloc_state_e malloc_state;
};
/* State used when we know nothing about function.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true, true, STATE_MALLOC_BOTTOM };
typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;
static bool gate_pure_const (void);
namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;

  /* Holders of ipa cgraph hooks: */
  struct cgraph_node_hook_list *function_insertion_hook_holder;
  struct cgraph_2node_hook_list *node_duplication_hook_holder;
  struct cgraph_node_hook_list *node_removal_hook_holder;

}; // class pass_ipa_pure_const

} // anon namespace
/* Try to guess if function body will always be visible to compiler
   when compiling the call and whether compiler will be able
   to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
          || DECL_COMDAT (decl));
}
/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION, this function may initialize it and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
                   hash_set<tree> *warned_about,
                   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
              option,
              known_finite
              ? G_("function might be candidate for attribute %qs")
              : G_("function might be candidate for attribute %qs"
                   " if it is known to return normally"), attrib_name);
  return warned_about;
}
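/* Usage note (added for exposition): with -Wsuggest-attribute=pure this
   emits, at the function's declaration,

     warning: function might be candidate for attribute 'pure'

   or the "... if it is known to return normally" variant when
   KNOWN_FINITE is false.  */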
/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  /* Declaring a void function pure makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
                         known_finite, warned_about, "pure");
}
/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  /* Declaring a void function const makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
                         known_finite, warned_about, "const");
}
/* Emit suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
                         true, warned_about, "malloc");
}
/* Emit suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  cgraph_node *node = cgraph_node::get (decl);
  if (node->instrumentation_clone)
    decl = node->instrumented_version->decl;

  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
                           true, warned_about, "noreturn");
}
/* Emit suggestion about __attribute__((cold)) for DECL.  */

void
warn_function_cold (tree decl)
{
  tree original_decl = decl;

  cgraph_node *node = cgraph_node::get (decl);
  if (node->instrumentation_clone)
    decl = node->instrumented_version->decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
                         true, warned_about, "cold");
}
/* Return true if we have a function state for NODE.  */

static inline bool
has_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    return false;
  return funct_state_vec[node->uid] != NULL;
}
/* Return the function state from NODE.  */

static inline funct_state
get_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid
      || !funct_state_vec[node->uid])
    /* We might want to put correct previously_known state into varying.  */
    return &varying_state;
  return funct_state_vec[node->uid];
}
/* Set the function state S for NODE.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
     funct_state_vec.safe_grow_cleared (node->uid + 1);

  /* If funct_state_vec already contains a funct_state, we have to release
     it before it's going to be overwritten.  */
  if (funct_state_vec[node->uid] != NULL
      && funct_state_vec[node->uid] != &varying_state)
    free (funct_state_vec[node->uid]);

  funct_state_vec[node->uid] = s;
}
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
            tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had been
     touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
        return; /* Read of a constant, do not change the function state.  */
      else
        {
          if (dump_file)
            fprintf (dump_file, "    global memory read is not const\n");
          /* Just a regular read.  */
          if (local->pure_const_state == IPA_CONST)
            local->pure_const_state = IPA_PURE;
        }
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
         variables.  */
      if (TREE_READONLY (t))
        return;

      if (dump_file)
        fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}
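/* Illustrative example (added for exposition, not from the original
   sources): given

     static const int limit = 42;  // readonly static
     static int counter;           // writable static

   a function that only reads LIMIT can stay IPA_CONST, one that reads
   COUNTER drops to IPA_PURE, and one that writes COUNTER becomes
   IPA_NEITHER.  */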
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
           && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
           && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
           && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
        fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
        fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}
/* Compute state based on ECF FLAGS and store to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
                  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
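/* For instance (illustrative): a callee declared __attribute__((const))
   arrives here with ECF_CONST set and maps to IPA_CONST; ECF_PURE maps
   to IPA_PURE; a call that cannot lead to a return is treated as pure
   but looping; everything else is IPA_NEITHER, looping.  */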
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store the
   better of the two variants into STATE and LOOPING.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
              enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
        *looping = looping2;
      else
        *looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}
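/* E.g. (illustrative): if the analysis so far concluded IPA_NEITHER but
   the declaration says the function is pure, better_state () raises the
   result back to IPA_PURE and takes the looping flag from the
   declaration.  */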
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store the
   worse of the two variants into STATE and LOOPING.
   FROM and TO are the caller and callee symtab nodes; they are used to
   check whether the callee may be interposed.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
             enum pure_const_state_e state2, bool looping2,
             struct symtab_node *from,
             struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now this function will be detected as CONST; however, when interposed
     it may end up being just pure.  We always must assume the worst
     scenario here.  */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Dropping state to PURE because call to %s may not "
                 "bind to current def.\n", to->name ());
      state2 = IPA_PURE;
    }
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
/* Recognize special cases of builtins that are by themselves not pure or const
   but the function using them is.  */

static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
                       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
        *looping = false;
        *state = IPA_CONST;
        return true;
      case BUILT_IN_PREFETCH:
        *looping = true;
        *state = IPA_CONST;
        return true;
      default:
        break;
      }
  return false;
}
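/* For instance (illustrative): a function calling __builtin_alloca can
   still be const/pure because the allocation only affects its own frame;
   __builtin_prefetch is likewise harmless but conservatively sets
   *LOOPING.  */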
/* Check the parameters of function call CALL to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also affect the purity.  CALL is the call
   statement for the entire call expression.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
                                     && stmt_can_throw_external (call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
            && tree_could_throw_p (gimple_op (call, i)))
          {
            if (possibly_throws && cfun->can_throw_non_call_exceptions)
              {
                if (dump_file)
                  fprintf (dump_file, "    operand can throw; looping\n");
                local->looping = true;
              }
            if (possibly_throws_externally)
              {
                if (dump_file)
                  fprintf (dump_file, "    operand can throw externally\n");
                local->can_throw = true;
              }
          }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
          && !nonfreeing_call_p (call))
        local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
        {
          worse_state (&local->pure_const_state, &local->looping,
                       call_state, call_looping,
                       NULL, NULL);
          return;
        }
      /* When bad things happen to bad functions, they cannot be const
         or pure.  */
      if (setjmp_call_p (callee_t))
        {
          if (dump_file)
            fprintf (dump_file, "    setjmp is not const/pure\n");
          local->looping = true;
          local->pure_const_state = IPA_NEITHER;
        }

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee_t))
          {
          case BUILT_IN_LONGJMP:
          case BUILT_IN_NONLOCAL_GOTO:
            if (dump_file)
              fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
            local->pure_const_state = IPA_NEITHER;
            local->looping = true;
            break;
          default:
            break;
          }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
        fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
        }
      if (possibly_throws_externally)
        {
          if (dump_file)
            {
              fprintf (dump_file, "    can throw externally to lp %i\n",
                       lookup_stmt_eh_lp (call));
              if (callee_t)
                fprintf (dump_file, "     callee:%s\n",
                         IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
            }
          local->can_throw = true;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
                        ((flags & (ECF_NORETURN | ECF_NOTHROW))
                         == (ECF_NORETURN | ECF_NOTHROW))
                        || (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
                   call_state, call_looping, NULL, NULL);
    }
  /* Direct function calls are handled by IPA propagation.  */
}
/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}
/* Look at the statement pointed to by iterator GSIP and figure out what
   interesting side effects it has.  */

static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers to be optimized more.  This is a
     common case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
                            ipa ? check_ipa_load : check_load,
                            ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
        }
      if (stmt_can_throw_external (stmt))
        {
          if (dump_file)
            fprintf (dump_file, "    can throw externally\n");
          local->can_throw = true;
        }
      else
        if (dump_file)
          fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
        /* Target of long jump.  */
        {
          if (dump_file)
            fprintf (dump_file, "    nonlocal label is not const/pure\n");
          local->pure_const_state = IPA_NEITHER;
        }
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
        {
          if (dump_file)
            fprintf (dump_file, "    memory asm clobber is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->can_free = true;
        }
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
        {
          if (dump_file)
            fprintf (dump_file, "    volatile is not const/pure\n");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;
          local->can_free = true;
        }
      break;
    default:
      break;
    }
}
/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is return value of the function and STMT is return stmt.  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
        tree op2 = gimple_cond_rhs (cond);
        if (!integer_zerop (op2))
          RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
        enum tree_code code = gimple_assign_rhs_code (ga);
        if (TREE_CODE_CLASS (code) != tcc_comparison)
          RETURN_FROM_IMM_USE_STMT (use_iter, false);
        if (!integer_zerop (gimple_assign_rhs2 (ga)))
          RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      RETURN_FROM_IMM_USE_STMT (use_iter, false);

  return true;
}
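/* Illustrative GIMPLE (added for exposition):

     p_1 = foo (n_2);
     if (p_1 == 0B) goto <bb 3>; else goto <bb 4>;
     ...
     return p_1;

   here the only uses of p_1 are a comparison against 0 and the return
   statement itself, so check_retval_uses () accepts it.  */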
/* malloc_candidate_p() checks if FUN can possibly be annotated with the
   malloc attribute.  Currently this function does a very conservative
   analysis.  FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and each element of the phi is either NULL or
      SSA_NAME_DEF_STMT(element) is a function call.
   3) The return value has immediate uses only within comparisons (gcond or
      gassign) and the return stmt (and likewise a phi arg has immediate use
      only within a comparison or the phi stmt).  */

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
             (node->name()), (reason));  \
  return false;  \
}

  if (EDGE_COUNT (exit_block->preds) == 0
      || !flag_delete_null_pointer_checks)
    return false;

  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
        return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
        DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
          || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
        DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!check_retval_uses (retval, ret_stmt))
        DUMP_AND_RETURN("Return value has uses outside return stmt"
                        " and comparisons against 0.")

      gimple *def = SSA_NAME_DEF_STMT (retval);
      if (gcall *call_stmt = dyn_cast<gcall *> (def))
        {
          tree callee_decl = gimple_call_fndecl (call_stmt);
          if (!callee_decl)
            return false;

          if (!ipa && !DECL_IS_MALLOC (callee_decl))
            DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
                            " non-ipa mode.")

          cgraph_edge *cs = node->get_edge (call_stmt);
          if (cs)
            {
              ipa_call_summary *es = ipa_call_summaries->get (cs);
              gcc_assert (es);
              es->is_return_callee_uncaptured = true;
            }
        }

      else if (gphi *phi = dyn_cast<gphi *> (def))
        {
          bool all_args_zero = true;
          for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
            {
              tree arg = gimple_phi_arg_def (phi, i);
              if (integer_zerop (arg))
                continue;

              all_args_zero = false;
              if (TREE_CODE (arg) != SSA_NAME)
                DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
              if (!check_retval_uses (arg, phi))
                DUMP_AND_RETURN ("phi arg has uses outside phi"
                                 " and comparisons against 0.")

              gimple *arg_def = SSA_NAME_DEF_STMT (arg);
              gcall *call_stmt = dyn_cast<gcall *> (arg_def);
              if (!call_stmt)
                return false;
              tree callee_decl = gimple_call_fndecl (call_stmt);
              if (!callee_decl)
                return false;
              if (!ipa && !DECL_IS_MALLOC (callee_decl))
                DUMP_AND_RETURN("callee_decl does not have malloc attribute"
                                " for non-ipa mode.")

              cgraph_edge *cs = node->get_edge (call_stmt);
              if (cs)
                {
                  ipa_call_summary *es = ipa_call_summaries->get (cs);
                  gcc_assert (es);
                  es->is_return_callee_uncaptured = true;
                }
            }

          if (all_args_zero)
            DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.");
        }

      else
        DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
             IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;

#undef DUMP_AND_RETURN
}
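/* A function of this shape (illustrative only) is accepted as a malloc
   candidate:

     void *
     xmalloc_wrapper (size_t n)
     {
       void *p = malloc (n);
       if (p == 0)
         exit (1);
       return p;
     }

   the return value is a pointer SSA name defined by a call, and its only
   other use is the comparison against 0.  */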
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
                    flags_from_decl_or_type (fn->decl),
                    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
        l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
               fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          check_stmt (&gsi, l, ipa);
          if (l->pure_const_state == IPA_NEITHER
              && l->looping
              && l->can_throw
              && l->can_free)
            goto end;
        }
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
         indication of possible infinite loop side
         effect).  */
      if (mark_dfs_back_edges ())
        {
          /* Preheaders are needed for SCEV to work.
             Simple latches and recorded exits improve chances that the loop
             will be proved finite in testcases such as in loop-15.c
             and loop-24.c.  */
          loop_optimizer_init (LOOPS_HAVE_PREHEADERS
                               | LOOPS_HAVE_SIMPLE_LATCHES
                               | LOOPS_HAVE_RECORDED_EXITS);
          if (dump_file && (dump_flags & TDF_DETAILS))
            flow_loops_dump (dump_file, NULL, 0);
          if (mark_irreducible_loops ())
            {
              if (dump_file)
                fprintf (dump_file, "    has irreducible loops\n");
              l->looping = true;
            }
          else
            {
              struct loop *loop;
              scev_initialize ();
              FOR_EACH_LOOP (loop, 0)
                if (!finite_loop_p (loop))
                  {
                    if (dump_file)
                      fprintf (dump_file, "    can not prove finiteness of "
                               "loop %i\n", loop->num);
                    l->looping = true;
                    break;
                  }
              scev_finalize ();
            }
          loop_optimizer_finalize ();
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
                l->state_previously_known,
                l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
        fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
        fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}
/* Called when new function is inserted to callgraph late.  */

static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  if (opt_for_fn (node->decl, flag_ipa_pure_const))
    set_function_state (node, analyze_function (node, true));
}
/* Called when new clone is inserted to callgraph late.  */

static void
duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
                     void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (src))
    {
      funct_state l = XNEW (struct funct_state_d);
      gcc_assert (!has_function_state (dst));
      memcpy (l, get_function_state (src), sizeof (*l));
      set_function_state (dst, l);
    }
}

/* Called when a node is removed from the callgraph.  */

static void
remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (node))
    set_function_state (node, NULL);
}
void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  node_removal_hook_holder =
      symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
      symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
}
/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We can not use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      set_function_state (node, analyze_function (node, true));
}
/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
        count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
        {
          struct bitpack_d bp;
          funct_state fs;
          int node_ref;
          lto_symtab_encoder_t encoder;

          fs = get_function_state (node);

          encoder = ob->decl_state->symtab_node_encoder;
          node_ref = lto_symtab_encoder_encode (encoder, node);
          streamer_write_uhwi_stream (ob->main_stream, node_ref);

          /* Note that flags will need to be read in the opposite
             order as we are pushing the bitflags into FLAGS.  */
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, fs->pure_const_state, 2);
          bp_pack_value (&bp, fs->state_previously_known, 2);
          bp_pack_value (&bp, fs->looping_previously_known, 1);
          bp_pack_value (&bp, fs->looping, 1);
          bp_pack_value (&bp, fs->can_throw, 1);
          bp_pack_value (&bp, fs->can_free, 1);
          bp_pack_value (&bp, fs->malloc_state, 2);
          streamer_write_bitpack (&bp);
        }
    }

  lto_destroy_simple_output_block (ob);
}
/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
        = lto_create_simple_input_block (file_data,
                                         LTO_section_ipa_pure_const,
                                         &data, &len);
      if (ib)
        {
          unsigned int i;
          unsigned int count = streamer_read_uhwi (ib);

          for (i = 0; i < count; i++)
            {
              unsigned int index;
              struct cgraph_node *node;
              struct bitpack_d bp;
              funct_state fs;
              lto_symtab_encoder_t encoder;

              fs = XCNEW (struct funct_state_d);
              index = streamer_read_uhwi (ib);
              encoder = file_data->symtab_node_encoder;
              node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                        index));
              set_function_state (node, fs);

              /* Note that the flags must be read in the opposite
                 order in which they were written (the bitflags were
                 pushed into FLAGS).  */
              bp = streamer_read_bitpack (ib);
              fs->pure_const_state
                        = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->state_previously_known
                        = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->looping_previously_known = bp_unpack_value (&bp, 1);
              fs->looping = bp_unpack_value (&bp, 1);
              fs->can_throw = bp_unpack_value (&bp, 1);
              fs->can_free = bp_unpack_value (&bp, 1);
              fs->malloc_state
                        = (enum malloc_state_e) bp_unpack_value (&bp, 2);

              if (dump_file)
                {
                  int flags = flags_from_decl_or_type (node->decl);
                  fprintf (dump_file, "Read info for %s ", node->dump_name ());
                  if (flags & ECF_CONST)
                    fprintf (dump_file, " const");
                  if (flags & ECF_PURE)
                    fprintf (dump_file, " pure");
                  if (flags & ECF_NOTHROW)
                    fprintf (dump_file, " nothrow");
                  fprintf (dump_file, "\n  pure const state: %s\n",
                           pure_const_names[fs->pure_const_state]);
                  fprintf (dump_file, "  previously known state: %s\n",
                           pure_const_names[fs->state_previously_known]);
                  if (fs->looping)
                    fprintf (dump_file," function is locally looping\n");
                  if (fs->looping_previously_known)
                    fprintf (dump_file," function is previously known looping\n");
                  if (fs->can_throw)
                    fprintf (dump_file," function is locally throwing\n");
                  if (fs->can_free)
                    fprintf (dump_file," function can locally free\n");
                  fprintf (dump_file, "\n malloc state: %s\n",
                           malloc_state_names[fs->malloc_state]);
                }
            }

          lto_destroy_simple_input_block (file_data,
                                          LTO_section_ipa_pure_const,
                                          ib, data, len);
        }
    }
}
/* We only propagate across edges that can throw externally and whose callee
   is not interposable.  */

static bool
ignore_edge_for_nothrow (struct cgraph_edge *e)
{
  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
    return true;

  enum availability avail;
  cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
                                                                e->caller);
  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
    return true;
  return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
         && !e->callee->binds_to_current_def_p (e->caller);
}
/* Return true if NODE is a self recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}
/* Return true if N is a cdtor that is not const or pure.  In this case we
   may need to remove the unreachable function if it is marked const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
            || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
  return false;
}
/* We only propagate across edges with non-interposable callee.  */

static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
  enum availability avail;
  e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  return (avail <= AVAIL_INTERPOSABLE);
}
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;
  bool has_cdtor;

  order_pos = ipa_reduced_postorder (order, true, false,
                                     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
        continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
        {
          struct cgraph_edge *e;
          struct cgraph_edge *ie;
          int i;
          struct ipa_ref *ref = NULL;

          funct_state w_l = get_function_state (w);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
                     w->dump_name (),
                     pure_const_names[w_l->pure_const_state],
                     w_l->looping);

          /* First merge in function body properties.
             We are safe to pass NULL as FROM and TO because we will take care
             of possible interposition when walking callees.  */
          worse_state (&pure_const_state, &looping,
                       w_l->pure_const_state, w_l->looping,
                       NULL, NULL);
          if (pure_const_state == IPA_NEITHER)
            break;

          count++;

          /* We consider recursive cycles as possibly infinite.
             This might be relaxed since infinite recursion leads to stack
             overflow.  */
          if (count > 1)
            looping = true;

          /* Now walk the edges and merge in callee properties.  */
          for (e = w->callees; e && pure_const_state != IPA_NEITHER;
               e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                                function_or_virtual_thunk_symbol (&avail,
                                                                  e->caller);
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "    Call to %s",
                         e->callee->dump_name ());
              if (avail > AVAIL_INTERPOSABLE)
                {
                  funct_state y_l = get_function_state (y);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file,
                             " state:%s looping:%i\n",
                             pure_const_names[y_l->pure_const_state],
                             y_l->looping);
                  if (y_l->pure_const_state > IPA_PURE
                      && e->cannot_lead_to_return_p ())
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file,
                                 "        Ignoring side effects"
                                 " -> pure, looping\n");
                      edge_state = IPA_PURE;
                      edge_looping = true;
                    }
                  else
                    {
                      edge_state = y_l->pure_const_state;
                      edge_looping = y_l->looping;
                    }
                }
              else if (special_builtin_state (&edge_state, &edge_looping,
                                              y->decl))
                ;
              else
                state_from_flags (&edge_state, &edge_looping,
                                  flags_from_decl_or_type (y->decl),
                                  e->cannot_lead_to_return_p ());

              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping, e->caller, e->callee);
              if (pure_const_state == IPA_NEITHER)
                break;
            }

          /* Now process the indirect call.  */
          for (ie = w->indirect_calls;
               ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
            {
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "    Indirect call");
              state_from_flags (&edge_state, &edge_looping,
                                ie->indirect_info->ecf_flags,
                                ie->cannot_lead_to_return_p ());
              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping, NULL, NULL);
              if (pure_const_state == IPA_NEITHER)
                break;
            }

          /* And finally all loads and stores.  */
          for (i = 0; w->iterate_reference (i, ref)
               && pure_const_state != IPA_NEITHER; i++)
            {
              enum pure_const_state_e ref_state = IPA_CONST;
              bool ref_looping = false;
              switch (ref->use)
                {
                case IPA_REF_LOAD:
                  /* readonly reads are safe.  */
                  if (TREE_READONLY (ref->referred->decl))
                    break;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "    nonreadonly global var read\n");
                  ref_state = IPA_PURE;
                  break;
                case IPA_REF_STORE:
                  if (ref->cannot_lead_to_return ())
                    break;
                  ref_state = IPA_NEITHER;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "    global var write\n");
                  break;
                case IPA_REF_ADDR:
                  break;
                default:
                  gcc_unreachable ();
                }
              better_state (&ref_state, &ref_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           ref_state, ref_looping, NULL, NULL);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Result %s looping %i\n",
                 pure_const_names [pure_const_state],
                 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
        {
          struct cgraph_edge *e;
          funct_state w_l = get_function_state (w);

          if (w_l->can_free
              || w->get_availability () == AVAIL_INTERPOSABLE
              || w->indirect_calls)
            can_free = true;

          for (e = w->callees; e && !can_free; e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                                function_or_virtual_thunk_symbol (&avail,
                                                                  e->caller);

              if (avail > AVAIL_INTERPOSABLE)
                can_free = get_function_state (y)->can_free;
              else
                can_free = true;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = get_function_state (w);
          enum pure_const_state_e this_state = pure_const_state;
          bool this_looping = looping;

          w_l->can_free = can_free;
          w->nonfreeing_fn = !can_free;
          if (!can_free && dump_file)
            fprintf (dump_file, "Function found not to call free: %s\n",
                     w->name ());

          if (w_l->state_previously_known != IPA_NEITHER
              && this_state > w_l->state_previously_known)
            {
              this_state = w_l->state_previously_known;
              if (this_state == IPA_NEITHER)
                this_looping = w_l->looping_previously_known;
            }
          if (!this_looping && self_recursive_p (w))
            this_looping = true;
          if (!w_l->looping_previously_known)
            this_looping = false;

          /* All nodes within a cycle share the same info.  */
          w_l->pure_const_state = this_state;
          w_l->looping = this_looping;

          /* Inline clones share declaration with their offline copies;
             do not modify their declarations since the offline copy may
             be different (i.e. inline clone is not const function).  */
          if (!w->global.inlined_to)
            switch (this_state)
              {
              case IPA_CONST:
                if (!TREE_READONLY (w->decl))
                  {
                    warn_function_const (w->decl, !this_looping);
                    if (dump_file)
                      fprintf (dump_file, "Function found to be %sconst: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                  }
                /* Turning constructor or destructor to non-looping const/pure
                   enables us to possibly remove the function completely.  */
                if (this_looping)
                  has_cdtor = false;
                else
                  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
                                                              NULL, true);
                if (w->set_const_flag (true, this_looping))
                  {
                    if (dump_file)
                      fprintf (dump_file,
                               "Declaration updated to be %sconst: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                    remove_p |= has_cdtor;
                  }
                break;

              case IPA_PURE:
                if (!DECL_PURE_P (w->decl))
                  {
                    warn_function_pure (w->decl, !this_looping);
                    if (dump_file)
                      fprintf (dump_file, "Function found to be %spure: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                  }
                if (this_looping)
                  has_cdtor = false;
                else
                  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
                                                              NULL, true);
                if (w->set_pure_flag (true, this_looping))
                  {
                    if (dump_file)
                      fprintf (dump_file,
                               "Declaration updated to be %spure: %s\n",
                               this_looping ? "looping " : "",
                               w->name ());
                    remove_p |= has_cdtor;
                  }
                break;

              default:
                break;
              }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
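/* Propagation example (added for exposition): if f and g are locally
   const and call only each other, they form one strongly connected
   component; the cycle is collapsed, the shared state stays IPA_CONST,
   and LOOPING is set because the mutual recursion itself may not
   terminate.  */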
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false,
                                     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
        continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
        {
          struct cgraph_edge *e, *ie;

          if (!TREE_NOTHROW (w->decl))
            {
              funct_state w_l = get_function_state (w);

              if (w_l->can_throw
                  || w->get_availability () == AVAIL_INTERPOSABLE)
                can_throw = true;

              for (e = w->callees; e && !can_throw; e = e->next_callee)
                {
                  enum availability avail;

                  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
                    continue;

                  struct cgraph_node *y = e->callee->
                                   function_or_virtual_thunk_symbol (&avail,
                                                                     e->caller);

                  /* We can use info about the callee only if we know it can
                     not be interposed.
                     When callee is compiled with non-call exceptions we also
                     must check that the declaration is bound to current
                     body as other semantically equivalent body may still
                     throw.  */
                  if (avail <= AVAIL_INTERPOSABLE
                      || (!TREE_NOTHROW (y->decl)
                          && (get_function_state (y)->can_throw
                              || (opt_for_fn (y->decl, flag_non_call_exceptions)
                                  && !e->callee->binds_to_current_def_p (w)))))
                    can_throw = true;
                }
              for (ie = w->indirect_calls; ie && !can_throw;
                   ie = ie->next_callee)
                if (ie->can_throw_external
                    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
                  can_throw = true;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = get_function_state (w);
          if (!can_throw && !TREE_NOTHROW (w->decl))
            {
              /* Inline clones share declaration with their offline copies;
                 do not modify their declarations since the offline copy may
                 be different (i.e. inline clone is not nothrow).  */
              if (!w->global.inlined_to)
                {
                  w->set_nothrow_flag (true);
                  if (dump_file)
                    fprintf (dump_file, "Function found to be nothrow: %s\n",
                             w->name ());
                }
            }
          else if (can_throw && !TREE_NOTHROW (w->decl))
            w_l->can_throw = true;
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
}
/* Debugging function to dump state of malloc lattice.  */

static void
dump_malloc_lattice (FILE *dump_file, const char *s)
{
  if (!dump_file)
    return;

  fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      funct_state fs = get_function_state (node);
      malloc_state_e state = fs->malloc_state;
      fprintf (dump_file, "%s: %s\n", node->name (), malloc_state_names[state]);
    }
}
/* Propagate malloc attribute across the callgraph.  */

static void
propagate_malloc (void)
{
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      if (DECL_IS_MALLOC (node->decl))
        if (!has_function_state (node))
          {
            funct_state l = XCNEW (struct funct_state_d);
            *l = varying_state;
            l->malloc_state = STATE_MALLOC;
            set_function_state (node, l);
          }
    }

  dump_malloc_lattice (dump_file, "Initial");
  struct cgraph_node **order
    = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos = ipa_reverse_postorder (order);
  bool changed = true;

  while (changed)
    {
      changed = false;
      /* Walk in postorder.  */
      for (int i = order_pos - 1; i >= 0; --i)
        {
          cgraph_node *node = order[i];
          if (node->alias
              || !node->definition
              || !has_function_state (node))
            continue;

          funct_state l = get_function_state (node);

          /* FIXME: add support for indirect-calls.  */
          if (node->indirect_calls)
            {
              l->malloc_state = STATE_MALLOC_BOTTOM;
              continue;
            }

          if (node->get_availability () <= AVAIL_INTERPOSABLE)
            {
              l->malloc_state = STATE_MALLOC_BOTTOM;
              continue;
            }

          if (l->malloc_state == STATE_MALLOC_BOTTOM)
            continue;

          vec<cgraph_node *> callees = vNULL;
          for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
            {
              ipa_call_summary *es = ipa_call_summaries->get (cs);
              if (es && es->is_return_callee_uncaptured)
                callees.safe_push (cs->callee);
            }

          malloc_state_e new_state = l->malloc_state;
          for (unsigned j = 0; j < callees.length (); j++)
            {
              cgraph_node *callee = callees[j];
              if (!has_function_state (callee))
                {
                  new_state = STATE_MALLOC_BOTTOM;
                  break;
                }
              malloc_state_e callee_state = get_function_state (callee)->malloc_state;
              if (new_state < callee_state)
                new_state = callee_state;
            }
          if (new_state != l->malloc_state)
            {
              changed = true;
              l->malloc_state = new_state;
            }
        }
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    if (has_function_state (node))
      {
        funct_state l = get_function_state (node);
        if (!node->alias
            && l->malloc_state == STATE_MALLOC
            && !node->global.inlined_to)
          {
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Function %s found to be malloc\n",
                       node->name ());

            bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
            node->set_malloc_flag (true);
            if (!malloc_decl_p && warn_suggest_attribute_malloc)
              warn_function_malloc (node->decl);
          }
      }

  dump_malloc_lattice (dump_file, "after propagation");
  ipa_free_postorder_info ();
  free (order);
}
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  struct cgraph_node *node;
  bool remove_p;

  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);

  /* Nothrow makes more functions not lead to a return, which improves
     later analysis.  */
  propagate_nothrow ();
  propagate_malloc ();
  remove_p = propagate_pure_const ();

  /* Cleanup.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();
  return remove_p ? TODO_remove_functions : 0;
}
static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}
pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
                     pure_const_generate_summary, /* generate_summary */
                     pure_const_write_summary, /* write_summary */
                     pure_const_read_summary, /* read_summary */
                     NULL, /* write_optimization_summary */
                     NULL, /* read_optimization_summary */
                     NULL, /* stmt_fixup */
                     0, /* function_transform_todo_flags_start */
                     NULL, /* function_transform */
                     NULL), /* variable_transform */
  init_p (false),
  function_insertion_hook_holder (NULL),
  node_duplication_hook_holder (NULL),
  node_removal_hook_holder (NULL)
{
}
ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}
/* Return true if function should be skipped for local pure const analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over whole program after early
     optimizations we must not promote functions that are called by already
     processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
        fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable and
     do not have any non-interposable aliases.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !node->has_aliases_p ())
    {
      if (dump_file)
        fprintf (dump_file,
                 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}
/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */

namespace {

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
        fprintf (dump_file, "Function found to be noreturn: %s\n",
                 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
        node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
        {
          warn_function_const (current_function_decl, !l->looping);
          if (dump_file)
            fprintf (dump_file, "Function found to be %sconst: %s\n",
                     l->looping ? "looping " : "",
                     current_function_name ());
        }
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
               && !l->looping)
        {
          if (dump_file)
            fprintf (dump_file, "Function found to be non-looping: %s\n",
                     current_function_name ());
        }
      if (!skip && node->set_const_flag (true, l->looping))
        {
          if (dump_file)
            fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
                     l->looping ? "looping " : "",
                     current_function_name ());
          changed = true;
        }
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
        {
          warn_function_pure (current_function_decl, !l->looping);
          if (dump_file)
            fprintf (dump_file, "Function found to be %spure: %s\n",
                     l->looping ? "looping " : "",
                     current_function_name ());
        }
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
               && !l->looping)
        {
          if (dump_file)
            fprintf (dump_file, "Function found to be non-looping: %s\n",
                     current_function_name ());
        }
      if (!skip && node->set_pure_flag (true, l->looping))
        {
          if (dump_file)
            fprintf (dump_file, "Declaration updated to be %spure: %s\n",
                     l->looping ? "looping " : "",
                     current_function_name ());
          changed = true;
        }
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
        fprintf (dump_file, "Function found to be nothrow: %s\n",
                 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
        warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
        fprintf (dump_file, "Function found to be malloc: %s\n",
                 node->name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}
/* Emit noreturn warnings.  */

namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
          && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
        warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}
/* Simple local pass for nothrow discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */

namespace {

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow

unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering, we can not really use availability yet.  */
  if (cgraph_node::get (current_function_decl)->get_availability ()
      <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
        fprintf (dump_file, "Function is interposable;"
                 " not analyzing.\n");
      return 0;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        if (stmt_can_throw_external (gsi_stmt (gsi)))
          {
            if (is_gimple_call (gsi_stmt (gsi)))
              {
                tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
                if (callee_t && recursive_call_p (current_function_decl,
                                                  callee_t))
                  continue;
              }

            if (dump_file)
              {
                fprintf (dump_file, "Statement can throw: ");
                print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
              }
            return 0;
          }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gimple *g = last_stmt (this_block))
        if (is_gimple_call (g))
          {
            tree callee_t = gimple_call_fndecl (g);
            if (callee_t
                && recursive_call_p (current_function_decl, callee_t)
                && maybe_clean_eh_stmt (g)
                && gimple_purge_dead_eh_edges (this_block))
              cfg_changed = true;
          }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
             current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}