/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not prone
   to this problem since they are by definition transitive.  */
/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "diagnostic.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-scalar-evolution.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-fnsummary.h"
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

static const char *pure_const_names[3] = {"const", "pure", "neither"};

enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
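/* Illustration (not part of this pass): how source functions map onto the
   lattice above.  A hypothetical example, assuming plain C semantics:

     int add (int a, int b) { return a + b; }    // IPA_CONST: no memory read
     int counter;
     int get_counter (void) { return counter; }  // IPA_PURE: reads global memory
     void bump (void) { counter++; }             // IPA_NEITHER: writes global memory

   Analysis starts at IPA_CONST and only ever drops toward IPA_NEITHER.  */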
/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;

  enum malloc_state_e malloc_state;
};
/* State used when we know nothing about function.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true, true, STATE_MALLOC_BOTTOM };
typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;
static bool gate_pure_const (void);

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;

  /* Holders of ipa cgraph hooks: */
  struct cgraph_node_hook_list *function_insertion_hook_holder;
  struct cgraph_2node_hook_list *node_duplication_hook_holder;
  struct cgraph_node_hook_list *node_removal_hook_holder;

}; // class pass_ipa_pure_const
/* Try to guess if function body will always be visible to compiler
   when compiling the call and whether compiler will be able
   to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
	  || DECL_COMDAT (decl));
}
/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION, this function may initialize it and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? G_("function might be candidate for attribute %qs")
	      : G_("function might be candidate for attribute %qs"
		   " if it is known to return normally"), attrib_name);
  return warned_about;
}
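/* Illustration (not part of this pass): what the suggestion above looks like
   from the user's side.  Compiling a hypothetical translation unit with
   -Wsuggest-attribute=const may emit

     warning: function might be candidate for attribute 'const'

   which the user can act on by annotating the declaration:

     int square (int x) __attribute__ ((const));  */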
/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about, "pure");
}
/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
			 known_finite, warned_about, "const");
}
/* Emit suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
			 false, warned_about, "malloc");
}
/* Emit suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  cgraph_node *node = cgraph_node::get (decl);
  if (node->instrumentation_clone)
    decl = node->instrumented_version->decl;

  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
			   true, warned_about, "noreturn");
}
/* Emit suggestion about __attribute__((cold)) for DECL.  */

void
warn_function_cold (tree decl)
{
  tree original_decl = decl;

  cgraph_node *node = cgraph_node::get (decl);
  if (node->instrumentation_clone)
    decl = node->instrumented_version->decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
			 true, warned_about, "cold");
}
/* Return true if we have a function state for NODE.  */

static inline bool
has_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    return false;
  return funct_state_vec[node->uid] != NULL;
}
/* Return the function state from NODE.  */

static inline funct_state
get_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid
      || !funct_state_vec[node->uid])
    /* We might want to put correct previously_known state into varying.  */
    return &varying_state;
  return funct_state_vec[node->uid];
}
/* Set the function state S for NODE.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
     funct_state_vec.safe_grow_cleared (node->uid + 1);

  /* If funct_state_vec already contains a funct_state, we have to release
     it before it's going to be overwritten.  */
  if (funct_state_vec[node->uid] != NULL
      && funct_state_vec[node->uid] != &varying_state)
    free (funct_state_vec[node->uid]);

  funct_state_vec[node->uid] = s;
}
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "    global memory read is not const\n");
	  /* Just a regular read.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
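/* Illustration (not part of this pass): the distinctions check_decl draws.
   A hypothetical example:

     static const int limit = 100;  // readonly static: read keeps IPA_CONST
     static int cache;              // read drops IPA_CONST to IPA_PURE
     volatile int hw_reg;           // any use forces IPA_NEITHER

   and any store to `cache' (CHECKING_WRITE) likewise forces IPA_NEITHER.  */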
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
/* Compute state based on ECF FLAGS and store to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
		  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
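/* Illustration (not part of this pass): a sketch of the flag-to-state mapping
   implemented above, assuming the usual ECF flag meanings:

     flags                             -> *state,      *looping
     ECF_CONST                         -> IPA_CONST,   false
     ECF_PURE                          -> IPA_PURE,    false
     ECF_CONST|ECF_LOOPING_CONST_OR_PURE -> IPA_CONST, true
     (none, cannot lead to return)     -> IPA_PURE,    true
     (none)                            -> IPA_NEITHER, true  */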
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING better of the two variants.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING worse of the two variants.
   N is the actual node called.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now if this function will be detected as CONST however when interposed it
     may end up being just pure.  We always must assume the worst scenario
     here.  */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->name ());
      state2 = IPA_PURE;
    }

  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
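/* Illustration (not part of this pass): better_state keeps the stronger of
   two facts, worse_state the weaker.  For example

     better_state (&s, &l, IPA_CONST, false) with s == IPA_PURE  -> IPA_CONST
     worse_state  (&s, &l, IPA_PURE,  false, ...) with s == IPA_CONST -> IPA_PURE

   so a function body is only as const as its worst callee, while previously
   known attributes (merged via better_state) can still upgrade the result.  */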
/* Recognize special cases of builtins that are by themselves not pure or const
   but function using them is.  */

static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
		       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
	*looping = false;
	*state = IPA_CONST;
	return true;
      case BUILT_IN_PREFETCH:
	*looping = true;
	*state = IPA_CONST;
	return true;
      default:
	break;
      }
  return false;
}
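/* Illustration (not part of this pass): why the builtins above are special.
   A hypothetical function using alloca can still be const even though alloca
   itself manipulates the stack:

     int first_of_n (int n)
     {
       int *tmp = __builtin_alloca (n * sizeof (int));
       tmp[0] = n;
       return tmp[0];   // tmp never escapes the caller's frame
     }

   The builtin's effect is local to the caller's frame, so the caller's
   observable behavior stays const.  */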
/* Check the parameters of a function call to CALL_EXPR to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also affect the purity.  CALL is the CALL_EXPR
   node for the entire call expression.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
	if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
	fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct function calls are handled by IPA propagation.  */
}
/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}
/* Look into pointer pointed to by GSIP and figure out what interesting side
   effects it has.  */

static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else if (dump_file)
	fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      break;
    default:
      break;
    }
}
/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is return value of the function and STMT is return stmt.  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
	tree op2 = gimple_cond_rhs (cond);
	if (!integer_zerop (op2))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
	enum tree_code code = gimple_assign_rhs_code (ga);
	if (TREE_CODE_CLASS (code) != tcc_comparison)
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
	if (!integer_zerop (gimple_assign_rhs2 (ga)))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      RETURN_FROM_IMM_USE_STMT (use_iter, false);

  return true;
}
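/* Illustration (not part of this pass): uses that check_retval_uses accepts.
   In a hypothetical function ending with

     void *p = foo ();
     if (p == 0)
       abort ();
     return p;

   the return value is used only in a comparison against 0 and in the return
   statement, so it passes; storing `p' into a global would fail the check.  */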
/* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
   attribute. Currently this function does a very conservative analysis.
   FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and element of phi is either NULL or
      SSA_NAME_DEF_STMT(element) is function call.
   3) The return-value has immediate uses only within comparisons (gcond or
      gassign) and return_stmt (and likewise a phi arg has immediate use only
      within comparison or the phi stmt).  */

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "%s", (reason));  \
  return false;  \
}

  if (EDGE_COUNT (exit_block->preds) == 0)
    return false;

  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!check_retval_uses (retval, ret_stmt))
	DUMP_AND_RETURN("Return value has uses outside return stmt"
			" and comparisons against 0.")

      gimple *def = SSA_NAME_DEF_STMT (retval);
      if (gcall *call_stmt = dyn_cast<gcall *> (def))
	{
	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;

	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			    " non-ipa mode.")

	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get (cs);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      else if (gphi *phi = dyn_cast<gphi *> (def))
	for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	  {
	    tree arg = gimple_phi_arg_def (phi, i);
	    if (TREE_CODE (arg) != SSA_NAME)
	      DUMP_AND_RETURN("phi arg is not SSA_NAME.")
	    if (!(arg == null_pointer_node || check_retval_uses (arg, phi)))
	      DUMP_AND_RETURN("phi arg has uses outside phi"
			      " and comparisons against 0.")

	    gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	    gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	    if (!call_stmt)
	      return false;
	    tree callee_decl = gimple_call_fndecl (call_stmt);
	    if (!callee_decl)
	      return false;
	    if (!ipa && !DECL_IS_MALLOC (callee_decl))
	      DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			      " non-ipa mode.")

	    cgraph_edge *cs = node->get_edge (call_stmt);
	    if (cs)
	      {
		ipa_call_summary *es = ipa_call_summaries->get (cs);
		es->is_return_callee_uncaptured = true;
	      }
	  }

      else
	DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;

#undef DUMP_AND_RETURN
}
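/* Illustration (not part of this pass): a function malloc_candidate_p
   accepts.  A hypothetical wrapper:

     void *xmalloc (size_t n)
     {
       void *p = malloc (n);
       if (!p)
	 abort ();
       return p;
     }

   The return value is an SSA pointer defined by a call to a DECL_IS_MALLOC
   callee and used only in a comparison against 0 and the return.  */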
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of possible infinite loop side
	 effect).  */
      if (mark_dfs_back_edges ())
	{
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that loop will
	     be proved finite in testcases such as in loop-15.c.  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
		fprintf (dump_file, "    has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, "    can not prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping = true;
		    break;
		  }
	      scev_finalize ();
	    }
	  loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
	fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
	fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
	fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
	fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}
/* Called when new function is inserted to callgraph late.  */

static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  if (opt_for_fn (node->decl, flag_ipa_pure_const))
    set_function_state (node, analyze_function (node, true));
}
/* Called when new clone is inserted to callgraph late.  */

static void
duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
		     void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (src))
    {
      funct_state l = XNEW (struct funct_state_d);
      gcc_assert (!has_function_state (dst));
      memcpy (l, get_function_state (src), sizeof (*l));
      set_function_state (dst, l);
    }
}
/* Called when a node is removed from the callgraph late.  */

static void
remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (node))
    set_function_state (node, NULL);
}
void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  node_removal_hook_holder =
      symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
      symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
}
/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We can not use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      set_function_state (node, analyze_function (node, true));
}
/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	{
	  struct bitpack_d bp;
	  funct_state fs;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  fs = get_function_state (node);

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      fs = XCNEW (struct funct_state_d);
	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));
	      set_function_state (node, fs);

	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
		= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
		= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
		= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file," function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file," function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file," function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file," function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
/* We only propagate across edges that can throw externally and their callee
   is not interposable.  */

static bool
ignore_edge_for_nothrow (struct cgraph_edge *e)
{
  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
    return true;

  enum availability avail;
  cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
								e->caller);
  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
    return true;
  return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
	 && !e->callee->binds_to_current_def_p (e->caller);
}
/* Return true if NODE is self recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}
/* Return true if N is cdtor that is not const or pure.  In this case we may
   need to remove unreachable function if it is marked const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
	    || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
  return false;
}
/* We only propagate across edges with non-interposable callee.  */

static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
  enum availability avail;
  e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  return (avail <= AVAIL_INTERPOSABLE);
}
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;
  bool has_cdtor;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = get_function_state (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
		     w->dump_name (),
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.
	     We are safe to pass NULL as FROM and TO because we will take care
	     of possible interposition when walking callees.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping,
		       NULL, NULL);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
	       e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "    Call to %s",
			   e->callee->dump_name ());
		}
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = get_function_state (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					      y->decl))
		;
	      else
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, e->caller, e->callee);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* Now process the indirect call.  */
	  for (ie = w->indirect_calls;
	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Indirect call");
	      state_from_flags (&edge_state, &edge_looping,
				ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref)
	       && pure_const_state != IPA_NEITHER; i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = get_function_state (w);

	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = get_function_state (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->name ());

	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->global.inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		/* Turning constructor or destructor to non-looping const/pure
		   enables us to possibly remove the function completely.  */
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_const_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_pure_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
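/* Illustration (not part of this pass): how cycle collapsing behaves.  For a
   hypothetical mutually recursive pair

     int even (int n) { return n == 0 ? 1 : odd (n - 1); }
     int odd  (int n) { return n == 0 ? 0 : even (n - 1); }

   both nodes land in one strongly connected region, share the worst state of
   the region (here IPA_CONST), and are marked looping because this pass
   cannot prove the recursion finite.  */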
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = get_function_state (w);

	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it can
		     not be interposed.
		     When callee is compiled with non-call exceptions we also
		     must check that the declaration is bound to current
		     body as other semantically equivalent body may still
		     throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (get_function_state (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->global.inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
/* Debugging function to dump state of malloc lattice.  */

static void
dump_malloc_lattice (FILE *dump_file, const char *s)
{
  if (!dump_file)
    return;

  fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      funct_state fs = get_function_state (node);
      malloc_state_e state = fs->malloc_state;
      fprintf (dump_file, "%s: %s\n", node->name (), malloc_state_names[state]);
    }
}
/* Propagate malloc attribute across the callgraph.  */

static void
propagate_malloc (void)
{
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      if (DECL_IS_MALLOC (node->decl))
	if (!has_function_state (node))
	  {
	    funct_state l = XCNEW (struct funct_state_d);
	    *l = varying_state;
	    l->malloc_state = STATE_MALLOC;
	    set_function_state (node, l);
	  }
    }

  dump_malloc_lattice (dump_file, "Initial");
  struct cgraph_node **order
    = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos = ipa_reverse_postorder (order);
  bool changed = true;

  while (changed)
    {
      changed = false;
      /* Walk in postorder.  */
      for (int i = order_pos - 1; i >= 0; --i)
	{
	  cgraph_node *node = order[i];
	  if (node->alias
	      || !node->definition
	      || !has_function_state (node))
	    continue;

	  funct_state l = get_function_state (node);

	  /* FIXME: add support for indirect-calls.  */
	  if (node->indirect_calls)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (node->get_availability () <= AVAIL_INTERPOSABLE)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (l->malloc_state == STATE_MALLOC_BOTTOM)
	    continue;

	  vec<cgraph_node *> callees = vNULL;
	  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get (cs);
	      if (es && es->is_return_callee_uncaptured)
		callees.safe_push (cs->callee);
	    }

	  malloc_state_e new_state = l->malloc_state;
	  for (unsigned j = 0; j < callees.length (); j++)
	    {
	      cgraph_node *callee = callees[j];
	      if (!has_function_state (callee))
		{
		  new_state = STATE_MALLOC_BOTTOM;
		  break;
		}
	      malloc_state_e callee_state = get_function_state (callee)->malloc_state;
	      if (new_state < callee_state)
		new_state = callee_state;
	    }
	  if (new_state != l->malloc_state)
	    {
	      changed = true;
	      l->malloc_state = new_state;
	    }
	}
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    if (has_function_state (node))
      {
	funct_state l = get_function_state (node);
	if (!node->alias
	    && l->malloc_state == STATE_MALLOC
	    && !node->global.inlined_to)
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Function %s found to be malloc\n",
		       node->name ());

	    bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
	    node->set_malloc_flag (true);
	    if (!malloc_decl_p && warn_suggest_attribute_malloc)
	      warn_function_malloc (node->decl);
	  }
      }

  dump_malloc_lattice (dump_file, "after propagation");
  ipa_free_postorder_info ();
  free (order);
}
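/* Illustration (not part of this pass): malloc propagation through wrappers.
   Given a hypothetical chain

     void *alloc1 (size_t n) { return malloc (n); }
     void *alloc2 (size_t n) { return alloc1 (n); }

   both wrappers enter the lattice as candidates, meet their callees in the
   fixed-point walk above, settle at STATE_MALLOC, and set_malloc_flag then
   marks their declarations.  */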
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  struct cgraph_node *node;
  bool remove_p;

  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);

  /* Nothrow discovery makes more functions known not to lead to a return,
     which improves the later analysis.  */
  propagate_nothrow ();
  propagate_malloc ();
  remove_p = propagate_pure_const ();

  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();

  /* In WPA we use inline summaries for partitioning process.  */
  if (!flag_wpa)
    ipa_free_fn_summary ();
  return remove_p ? TODO_remove_functions : 0;
}
static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}
pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p(false),
  function_insertion_hook_holder(NULL),
  node_duplication_hook_holder(NULL),
  node_removal_hook_holder(NULL)
{
}

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}
/* Return true if function should be skipped for local pure const analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over whole program after early
     optimizations we must not promote functions that are called by already
     processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable and
     do not have any non-interposable aliases.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !node->has_aliases_p ())
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}
/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}
/* Emit noreturn warnings.  */

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn
gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}
/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow
unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering, we can not really use availability yet.  */
  if (cgraph_node::get (current_function_decl)->get_availability ()
      <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
	fprintf (dump_file, "Function is interposable;"
		 " not analyzing.\n");
      return true;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	if (stmt_can_throw_external (gsi_stmt (gsi)))
	  {
	    if (is_gimple_call (gsi_stmt (gsi)))
	      {
		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
		if (callee_t && recursive_call_p (current_function_decl,
						  callee_t))
		  continue;
	      }

	    if (dump_file)
	      {
		fprintf (dump_file, "Statement can throw: ");
		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
	      }
	    return 0;
	  }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gimple *g = last_stmt (this_block))
	if (is_gimple_call (g))
	  {
	    tree callee_t = gimple_call_fndecl (g);
	    if (callee_t
		&& recursive_call_p (current_function_decl, callee_t)
		&& maybe_clean_eh_stmt (g)
		&& gimple_purge_dead_eh_edges (this_block))
	      cfg_changed = true;
	  }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
	     current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}
gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}