1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
25 This must be run after inlining decisions have been made since
26 otherwise the local sets will not contain information that is
27 consistent with the post-inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
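/* As an illustration, given hypothetical user code such as

     int counter;
     int sq (int x)   { return x * x; }
     int get (void)   { return counter; }
     void bump (void) { counter++; }

   sq depends only on its arguments and is a candidate for const
   (TREE_READONLY), get reads global memory but has no side effects and
   is a candidate for pure (DECL_PURE_P), while bump writes global
   memory and is neither.  */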
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since its information is used by
32 the rest of the compilation. */
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "hash-set.h"
39 #include "machmode.h"
40 #include "vec.h"
41 #include "double-int.h"
42 #include "input.h"
43 #include "alias.h"
44 #include "symtab.h"
45 #include "wide-int.h"
46 #include "inchash.h"
47 #include "tree.h"
48 #include "fold-const.h"
49 #include "print-tree.h"
50 #include "calls.h"
51 #include "predict.h"
52 #include "hard-reg-set.h"
53 #include "input.h"
54 #include "function.h"
55 #include "dominance.h"
56 #include "cfg.h"
57 #include "cfganal.h"
58 #include "basic-block.h"
59 #include "tree-ssa-alias.h"
60 #include "internal-fn.h"
61 #include "tree-eh.h"
62 #include "gimple-expr.h"
63 #include "is-a.h"
64 #include "gimple.h"
65 #include "gimple-iterator.h"
66 #include "gimple-walk.h"
67 #include "tree-cfg.h"
68 #include "tree-ssa-loop-niter.h"
69 #include "tree-inline.h"
70 #include "tree-pass.h"
71 #include "langhooks.h"
72 #include "hash-map.h"
73 #include "plugin-api.h"
74 #include "ipa-ref.h"
75 #include "cgraph.h"
76 #include "ipa-utils.h"
77 #include "flags.h"
78 #include "diagnostic.h"
79 #include "gimple-pretty-print.h"
80 #include "langhooks.h"
81 #include "target.h"
82 #include "lto-streamer.h"
83 #include "data-streamer.h"
84 #include "tree-streamer.h"
85 #include "cfgloop.h"
86 #include "tree-scalar-evolution.h"
87 #include "intl.h"
88 #include "opts.h"
89 #include "varasm.h"
91 /* Lattice values for const and pure functions. Everything starts out
92 being const, then may drop to pure and then neither depending on
93 what is found. */
94 enum pure_const_state_e
96 IPA_CONST,
97 IPA_PURE,
98 IPA_NEITHER
101 const char *pure_const_names[3] = {"const", "pure", "neither"};
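/* The order of the lattice values above is significant: better_state and
   worse_state below merge states with MIN/MAX, relying on
   IPA_CONST < IPA_PURE < IPA_NEITHER.  */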
103 /* Holder for the const_state. There is one of these per function
104 decl. */
105 struct funct_state_d
107 /* See above. */
108 enum pure_const_state_e pure_const_state;
109 /* What the user set here; we can always be sure about this. */
110 enum pure_const_state_e state_previously_known;
111 bool looping_previously_known;
113 /* True if the function could possibly loop forever. There are a
114 lot of ways that this could be determined. We are pretty
115 conservative here. While it is possible to CSE pure and const
116 calls, it is not legal to have DCE get rid of the call if there
117 is a possibility that the call could loop forever, since this is
118 a behavioral change. */
119 bool looping;
121 bool can_throw;
123 /* If function can call free, munmap or otherwise make previously
124 non-trapping memory accesses trapping. */
125 bool can_free;
128 /* State used when we know nothing about function. */
129 static struct funct_state_d varying_state
130 = { IPA_NEITHER, IPA_NEITHER, true, true, true, true };
133 typedef struct funct_state_d * funct_state;
135 /* The storage of the funct_state is abstracted because there is the
136 possibility that it may be desirable to move this to the cgraph
137 local info. */
139 /* Array, indexed by cgraph node uid, of function states. */
141 static vec<funct_state> funct_state_vec;
143 static bool gate_pure_const (void);
145 namespace {
147 const pass_data pass_data_ipa_pure_const =
149 IPA_PASS, /* type */
150 "pure-const", /* name */
151 OPTGROUP_NONE, /* optinfo_flags */
152 TV_IPA_PURE_CONST, /* tv_id */
153 0, /* properties_required */
154 0, /* properties_provided */
155 0, /* properties_destroyed */
156 0, /* todo_flags_start */
157 0, /* todo_flags_finish */
160 class pass_ipa_pure_const : public ipa_opt_pass_d
162 public:
163 pass_ipa_pure_const(gcc::context *ctxt);
165 /* opt_pass methods: */
166 bool gate (function *) { return gate_pure_const (); }
167 unsigned int execute (function *fun);
169 void register_hooks (void);
171 private:
172 bool init_p;
174 /* Holders of ipa cgraph hooks: */
175 struct cgraph_node_hook_list *function_insertion_hook_holder;
176 struct cgraph_2node_hook_list *node_duplication_hook_holder;
177 struct cgraph_node_hook_list *node_removal_hook_holder;
179 }; // class pass_ipa_pure_const
181 } // anon namespace
183 /* Try to guess whether the function body will always be visible to the
184 compiler when compiling the call and whether the compiler will be able
185 to propagate the information by itself. */
187 static bool
188 function_always_visible_to_compiler_p (tree decl)
190 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
193 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
194 is true if the function is known to be finite. The diagnostic is
195 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
196 OPTION; this function may initialize it, and it is always returned
197 by the function. */
199 static hash_set<tree> *
200 suggest_attribute (int option, tree decl, bool known_finite,
201 hash_set<tree> *warned_about,
202 const char * attrib_name)
204 if (!option_enabled (option, &global_options))
205 return warned_about;
206 if (TREE_THIS_VOLATILE (decl)
207 || (known_finite && function_always_visible_to_compiler_p (decl)))
208 return warned_about;
210 if (!warned_about)
211 warned_about = new hash_set<tree>;
212 if (warned_about->contains (decl))
213 return warned_about;
214 warned_about->add (decl);
215 warning_at (DECL_SOURCE_LOCATION (decl),
216 option,
217 known_finite
218 ? _("function might be candidate for attribute %<%s%>")
219 : _("function might be candidate for attribute %<%s%>"
220 " if it is known to return normally"), attrib_name);
221 return warned_about;
224 /* Emit suggestion about __attribute__((pure)) for DECL. KNOWN_FINITE
225 is true if the function is known to be finite. */
227 static void
228 warn_function_pure (tree decl, bool known_finite)
230 static hash_set<tree> *warned_about;
232 warned_about
233 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
234 known_finite, warned_about, "pure");
237 /* Emit suggestion about __attribute__((const)) for DECL. KNOWN_FINITE
238 is true if the function is known to be finite. */
240 static void
241 warn_function_const (tree decl, bool known_finite)
243 static hash_set<tree> *warned_about;
244 warned_about
245 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
246 known_finite, warned_about, "const");
249 static void
250 warn_function_noreturn (tree decl)
252 static hash_set<tree> *warned_about;
253 if (!lang_hooks.missing_noreturn_ok_p (decl)
254 && targetm.warn_func_return (decl))
255 warned_about
256 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
257 true, warned_about, "noreturn");
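/* Acting on these suggestions, a user would typically annotate the
   declarations, e.g. (hypothetical prototypes):

     int popcount (unsigned x) __attribute__ ((const));
     int strhash (const char *s) __attribute__ ((pure));
     void fatal (const char *msg) __attribute__ ((noreturn));  */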
260 /* Return true if we have a function state for NODE. */
262 static inline bool
263 has_function_state (struct cgraph_node *node)
265 if (!funct_state_vec.exists ()
266 || funct_state_vec.length () <= (unsigned int)node->uid)
267 return false;
268 return funct_state_vec[node->uid] != NULL;
271 /* Return the function state from NODE. */
273 static inline funct_state
274 get_function_state (struct cgraph_node *node)
276 if (!funct_state_vec.exists ()
277 || funct_state_vec.length () <= (unsigned int)node->uid
278 || !funct_state_vec[node->uid])
279 /* We might want to put correct previously_known state into varying. */
280 return &varying_state;
281 return funct_state_vec[node->uid];
284 /* Set the function state S for NODE. */
286 static inline void
287 set_function_state (struct cgraph_node *node, funct_state s)
289 if (!funct_state_vec.exists ()
290 || funct_state_vec.length () <= (unsigned int)node->uid)
291 funct_state_vec.safe_grow_cleared (node->uid + 1);
292 funct_state_vec[node->uid] = s;
295 /* Check to see if the use (or definition when CHECKING_WRITE is true)
296 of variable T is legal in a function that is either pure or const. */
298 static inline void
299 check_decl (funct_state local,
300 tree t, bool checking_write, bool ipa)
302 /* Do not want to do anything with volatile except mark any
303 function that uses one to be not const or pure. */
304 if (TREE_THIS_VOLATILE (t))
306 local->pure_const_state = IPA_NEITHER;
307 if (dump_file)
308 fprintf (dump_file, " Volatile operand is not const/pure\n");
309 return;
312 /* Do not care about a local automatic that is not static. */
313 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
314 return;
316 /* If the variable has the "used" attribute, treat it as if it had
317 been touched by the devil. */
318 if (DECL_PRESERVE_P (t))
320 local->pure_const_state = IPA_NEITHER;
321 if (dump_file)
322 fprintf (dump_file, " Used static/global variable is not const/pure\n");
323 return;
326 /* In IPA mode we are not interested in checking actual loads and stores;
327 they will be processed at propagation time using ipa_ref. */
328 if (ipa)
329 return;
331 /* Since we have dealt with the locals and params cases above, if we
332 are CHECKING_WRITE, this cannot be a pure or constant
333 function. */
334 if (checking_write)
336 local->pure_const_state = IPA_NEITHER;
337 if (dump_file)
338 fprintf (dump_file, " static/global memory write is not const/pure\n");
339 return;
342 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
344 /* Readonly reads are safe. */
345 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
346 return; /* Read of a constant, do not change the function state. */
347 else
349 if (dump_file)
350 fprintf (dump_file, " global memory read is not const\n");
351 /* Just a regular read. */
352 if (local->pure_const_state == IPA_CONST)
353 local->pure_const_state = IPA_PURE;
356 else
358 /* Compilation level statics can be read if they are readonly
359 variables. */
360 if (TREE_READONLY (t))
361 return;
363 if (dump_file)
364 fprintf (dump_file, " static memory read is not const\n");
365 /* Just a regular read. */
366 if (local->pure_const_state == IPA_CONST)
367 local->pure_const_state = IPA_PURE;
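/* For example, a read of a read-only static such as

     static const int tbl[4] = { 1, 1, 2, 3 };

   leaves the state untouched, whereas a read of a writable global drops
   IPA_CONST to IPA_PURE and any static/global write drops the function
   to IPA_NEITHER.  */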
372 /* Check to see if the use (or definition when CHECKING_WRITE is true)
373 of the operand T is legal in a function that is either pure or const. */
375 static inline void
376 check_op (funct_state local, tree t, bool checking_write)
378 t = get_base_address (t);
379 if (t && TREE_THIS_VOLATILE (t))
381 local->pure_const_state = IPA_NEITHER;
382 if (dump_file)
383 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
384 return;
386 else if (t
387 && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
388 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
389 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
391 if (dump_file)
392 fprintf (dump_file, " Indirect ref to local memory is OK\n");
393 return;
395 else if (checking_write)
397 local->pure_const_state = IPA_NEITHER;
398 if (dump_file)
399 fprintf (dump_file, " Indirect ref write is not const/pure\n");
400 return;
402 else
404 if (dump_file)
405 fprintf (dump_file, " Indirect ref read is not const\n");
406 if (local->pure_const_state == IPA_CONST)
407 local->pure_const_state = IPA_PURE;
411 /* Compute the state based on ECF FLAGS and store it to STATE and LOOPING. */
413 static void
414 state_from_flags (enum pure_const_state_e *state, bool *looping,
415 int flags, bool cannot_lead_to_return)
417 *looping = false;
418 if (flags & ECF_LOOPING_CONST_OR_PURE)
420 *looping = true;
421 if (dump_file && (dump_flags & TDF_DETAILS))
422 fprintf (dump_file, " looping");
424 if (flags & ECF_CONST)
426 *state = IPA_CONST;
427 if (dump_file && (dump_flags & TDF_DETAILS))
428 fprintf (dump_file, " const\n");
430 else if (flags & ECF_PURE)
432 *state = IPA_PURE;
433 if (dump_file && (dump_flags & TDF_DETAILS))
434 fprintf (dump_file, " pure\n");
436 else if (cannot_lead_to_return)
438 *state = IPA_PURE;
439 *looping = true;
440 if (dump_file && (dump_flags & TDF_DETAILS))
441 fprintf (dump_file, " ignoring side effects->pure looping\n");
443 else
445 if (dump_file && (dump_flags & TDF_DETAILS))
446 fprintf (dump_file, " neither\n");
447 *state = IPA_NEITHER;
448 *looping = true;
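/* For example, ECF_CONST alone maps to IPA_CONST with LOOPING false,
   while ECF_PURE | ECF_LOOPING_CONST_OR_PURE maps to IPA_PURE with
   LOOPING true.  */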
452 /* Merge STATE and STATE2 and LOOPING and LOOPING2, storing the
453 better of the two variants into STATE and LOOPING.
454 Be sure to merge looping correctly: IPA_NEITHER functions
455 have looping 0 even if they don't have to return. */
457 static inline void
458 better_state (enum pure_const_state_e *state, bool *looping,
459 enum pure_const_state_e state2, bool looping2)
461 if (state2 < *state)
463 if (*state == IPA_NEITHER)
464 *looping = looping2;
465 else
466 *looping = MIN (*looping, looping2);
467 *state = state2;
469 else if (state2 != IPA_NEITHER)
470 *looping = MIN (*looping, looping2);
473 /* Merge STATE and STATE2 and LOOPING and LOOPING2, storing the
474 worse of the two variants into STATE and LOOPING. */
476 static inline void
477 worse_state (enum pure_const_state_e *state, bool *looping,
478 enum pure_const_state_e state2, bool looping2)
480 *state = MAX (*state, state2);
481 *looping = MAX (*looping, looping2);
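/* Worked example of the two merges above: worse_state of IPA_PURE and
   IPA_CONST keeps IPA_PURE (the MAX), while better_state of IPA_NEITHER
   and IPA_PURE improves the result to IPA_PURE.  */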
484 /* Recognize special cases of builtins that are by themselves not pure or const,
485 but where the function using them can be. */
486 static bool
487 special_builtin_state (enum pure_const_state_e *state, bool *looping,
488 tree callee)
490 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
491 switch (DECL_FUNCTION_CODE (callee))
493 case BUILT_IN_RETURN:
494 case BUILT_IN_UNREACHABLE:
495 case BUILT_IN_ALLOCA:
496 case BUILT_IN_ALLOCA_WITH_ALIGN:
497 case BUILT_IN_STACK_SAVE:
498 case BUILT_IN_STACK_RESTORE:
499 case BUILT_IN_EH_POINTER:
500 case BUILT_IN_EH_FILTER:
501 case BUILT_IN_UNWIND_RESUME:
502 case BUILT_IN_CXA_END_CLEANUP:
503 case BUILT_IN_EH_COPY_VALUES:
504 case BUILT_IN_FRAME_ADDRESS:
505 case BUILT_IN_APPLY:
506 case BUILT_IN_APPLY_ARGS:
507 *looping = false;
508 *state = IPA_CONST;
509 return true;
510 case BUILT_IN_PREFETCH:
511 *looping = true;
512 *state = IPA_CONST;
513 return true;
514 default:
515 break;
517 return false;
520 /* Check the parameters of the call statement CALL to see if
521 there are any references in the parameters that are not allowed for
522 pure or const functions. Also check to see if this is either an
523 indirect call, a call outside the compilation unit, or a call with
524 special attributes that may also affect the purity. LOCAL is the
525 state of the function being analyzed; IPA is true in IPA mode. */
527 static void
528 check_call (funct_state local, gcall *call, bool ipa)
530 int flags = gimple_call_flags (call);
531 tree callee_t = gimple_call_fndecl (call);
532 bool possibly_throws = stmt_could_throw_p (call);
533 bool possibly_throws_externally = (possibly_throws
534 && stmt_can_throw_external (call));
536 if (possibly_throws)
538 unsigned int i;
539 for (i = 0; i < gimple_num_ops (call); i++)
540 if (gimple_op (call, i)
541 && tree_could_throw_p (gimple_op (call, i)))
543 if (possibly_throws && cfun->can_throw_non_call_exceptions)
545 if (dump_file)
546 fprintf (dump_file, " operand can throw; looping\n");
547 local->looping = true;
549 if (possibly_throws_externally)
551 if (dump_file)
552 fprintf (dump_file, " operand can throw externally\n");
553 local->can_throw = true;
558 /* The const and pure flags are set by a variety of places in the
559 compiler (including here). If someone has already set the flags
560 for the callee, (such as for some of the builtins) we will use
561 them, otherwise we will compute our own information.
563 Const and pure functions have less clobber effects than other
564 functions so we process these first. Otherwise if it is a call
565 outside the compilation unit or an indirect call we punt. This
566 leaves local calls which will be processed by following the call
567 graph. */
568 if (callee_t)
570 enum pure_const_state_e call_state;
571 bool call_looping;
573 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
574 && !nonfreeing_call_p (call))
575 local->can_free = true;
577 if (special_builtin_state (&call_state, &call_looping, callee_t))
579 worse_state (&local->pure_const_state, &local->looping,
580 call_state, call_looping);
581 return;
583 /* When bad things happen to bad functions, they cannot be const
584 or pure. */
585 if (setjmp_call_p (callee_t))
587 if (dump_file)
588 fprintf (dump_file, " setjmp is not const/pure\n");
589 local->looping = true;
590 local->pure_const_state = IPA_NEITHER;
593 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
594 switch (DECL_FUNCTION_CODE (callee_t))
596 case BUILT_IN_LONGJMP:
597 case BUILT_IN_NONLOCAL_GOTO:
598 if (dump_file)
599 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
600 local->pure_const_state = IPA_NEITHER;
601 local->looping = true;
602 break;
603 default:
604 break;
607 else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
608 local->can_free = true;
610 /* When not in IPA mode, we can still handle self recursion. */
611 if (!ipa && callee_t
612 && recursive_call_p (current_function_decl, callee_t))
614 if (dump_file)
615 fprintf (dump_file, " Recursive call can loop.\n");
616 local->looping = true;
618 /* Either callee is unknown or we are doing local analysis.
619 Look to see if there are any bits available for the callee (such as by
620 declaration or because it is builtin) and process solely on the basis of
621 those bits. */
622 else if (!ipa)
624 enum pure_const_state_e call_state;
625 bool call_looping;
626 if (possibly_throws && cfun->can_throw_non_call_exceptions)
628 if (dump_file)
629 fprintf (dump_file, " can throw; looping\n");
630 local->looping = true;
632 if (possibly_throws_externally)
634 if (dump_file)
636 fprintf (dump_file, " can throw externally to lp %i\n",
637 lookup_stmt_eh_lp (call));
638 if (callee_t)
639 fprintf (dump_file, " callee:%s\n",
640 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
642 local->can_throw = true;
644 if (dump_file && (dump_flags & TDF_DETAILS))
645 fprintf (dump_file, " checking flags for call:");
646 state_from_flags (&call_state, &call_looping, flags,
647 ((flags & (ECF_NORETURN | ECF_NOTHROW))
648 == (ECF_NORETURN | ECF_NOTHROW))
649 || (!flag_exceptions && (flags & ECF_NORETURN)));
650 worse_state (&local->pure_const_state, &local->looping,
651 call_state, call_looping);
653 /* Direct function calls are handled by IPA propagation. */
656 /* Wrapper around check_decl for loads in local mode. */
658 static bool
659 check_load (gimple, tree op, tree, void *data)
661 if (DECL_P (op))
662 check_decl ((funct_state)data, op, false, false);
663 else
664 check_op ((funct_state)data, op, false);
665 return false;
668 /* Wrapper around check_decl for stores in local mode. */
670 static bool
671 check_store (gimple, tree op, tree, void *data)
673 if (DECL_P (op))
674 check_decl ((funct_state)data, op, true, false);
675 else
676 check_op ((funct_state)data, op, true);
677 return false;
680 /* Wrapper around check_decl for loads in ipa mode. */
682 static bool
683 check_ipa_load (gimple, tree op, tree, void *data)
685 if (DECL_P (op))
686 check_decl ((funct_state)data, op, false, true);
687 else
688 check_op ((funct_state)data, op, false);
689 return false;
692 /* Wrapper around check_decl for stores in ipa mode. */
694 static bool
695 check_ipa_store (gimple, tree op, tree, void *data)
697 if (DECL_P (op))
698 check_decl ((funct_state)data, op, true, true);
699 else
700 check_op ((funct_state)data, op, true);
701 return false;
704 /* Look at the statement pointed to by GSIP and figure out what interesting
705 side effects it has. */
706 static void
707 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
709 gimple stmt = gsi_stmt (*gsip);
711 if (is_gimple_debug (stmt))
712 return;
714 /* Do consider clobbers as side effects before IPA, so we rather inline
715 C++ destructors and keep clobber semantics than eliminate them.
717 TODO: We may get smarter during early optimizations on these and let
718 functions containing only clobbers be optimized more. This is a common
719 case of C++ destructors. */
721 if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
722 return;
724 if (dump_file)
726 fprintf (dump_file, " scanning: ");
727 print_gimple_stmt (dump_file, stmt, 0, 0);
730 if (gimple_has_volatile_ops (stmt)
731 && !gimple_clobber_p (stmt))
733 local->pure_const_state = IPA_NEITHER;
734 if (dump_file)
735 fprintf (dump_file, " Volatile stmt is not const/pure\n");
738 /* Look for loads and stores. */
739 walk_stmt_load_store_ops (stmt, local,
740 ipa ? check_ipa_load : check_load,
741 ipa ? check_ipa_store : check_store);
743 if (gimple_code (stmt) != GIMPLE_CALL
744 && stmt_could_throw_p (stmt))
746 if (cfun->can_throw_non_call_exceptions)
748 if (dump_file)
749 fprintf (dump_file, " can throw; looping\n");
750 local->looping = true;
752 if (stmt_can_throw_external (stmt))
754 if (dump_file)
755 fprintf (dump_file, " can throw externally\n");
756 local->can_throw = true;
758 else
759 if (dump_file)
760 fprintf (dump_file, " can throw\n");
762 switch (gimple_code (stmt))
764 case GIMPLE_CALL:
765 check_call (local, as_a <gcall *> (stmt), ipa);
766 break;
767 case GIMPLE_LABEL:
768 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
769 /* Target of long jump. */
771 if (dump_file)
772 fprintf (dump_file, " nonlocal label is not const/pure\n");
773 local->pure_const_state = IPA_NEITHER;
775 break;
776 case GIMPLE_ASM:
777 if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
779 if (dump_file)
780 fprintf (dump_file, " memory asm clobber is not const/pure\n");
781 /* Abandon all hope, ye who enter here. */
782 local->pure_const_state = IPA_NEITHER;
783 local->can_free = true;
785 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
787 if (dump_file)
788 fprintf (dump_file, " volatile is not const/pure\n");
789 /* Abandon all hope, ye who enter here. */
790 local->pure_const_state = IPA_NEITHER;
791 local->looping = true;
792 local->can_free = true;
794 return;
795 default:
796 break;
801 /* This is the main routine for finding the reference patterns for
802 global variables within a function FN. */
804 static funct_state
805 analyze_function (struct cgraph_node *fn, bool ipa)
807 tree decl = fn->decl;
808 funct_state l;
809 basic_block this_block;
811 l = XCNEW (struct funct_state_d);
812 l->pure_const_state = IPA_CONST;
813 l->state_previously_known = IPA_NEITHER;
814 l->looping_previously_known = true;
815 l->looping = false;
816 l->can_throw = false;
817 l->can_free = false;
818 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
819 flags_from_decl_or_type (fn->decl),
820 fn->cannot_return_p ());
822 if (fn->thunk.thunk_p || fn->alias)
824 /* Thunk gets propagated through, so nothing interesting happens. */
825 gcc_assert (ipa);
826 if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
827 l->pure_const_state = IPA_NEITHER;
828 return l;
831 if (dump_file)
833 fprintf (dump_file, "\n\n local analysis of %s\n ",
834 fn->name ());
837 push_cfun (DECL_STRUCT_FUNCTION (decl));
839 FOR_EACH_BB_FN (this_block, cfun)
841 gimple_stmt_iterator gsi;
842 struct walk_stmt_info wi;
844 memset (&wi, 0, sizeof (wi));
845 for (gsi = gsi_start_bb (this_block);
846 !gsi_end_p (gsi);
847 gsi_next (&gsi))
849 check_stmt (&gsi, l, ipa);
850 if (l->pure_const_state == IPA_NEITHER
851 && l->looping
852 && l->can_throw
853 && l->can_free)
854 goto end;
858 end:
859 if (l->pure_const_state != IPA_NEITHER)
861 /* Const functions cannot have back edges (an
862 indication of possible infinite loop side
863 effect). */
864 if (mark_dfs_back_edges ())
866 /* Preheaders are needed for SCEV to work.
867 Simple latches and recorded exits improve the chances that a loop will
868 be proved to be finite in testcases such as loop-15.c
869 and loop-24.c. */
870 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
871 | LOOPS_HAVE_SIMPLE_LATCHES
872 | LOOPS_HAVE_RECORDED_EXITS);
873 if (dump_file && (dump_flags & TDF_DETAILS))
874 flow_loops_dump (dump_file, NULL, 0);
875 if (mark_irreducible_loops ())
877 if (dump_file)
878 fprintf (dump_file, " has irreducible loops\n");
879 l->looping = true;
881 else
883 struct loop *loop;
884 scev_initialize ();
885 FOR_EACH_LOOP (loop, 0)
886 if (!finite_loop_p (loop))
888 if (dump_file)
889 fprintf (dump_file, " can not prove finiteness of "
890 "loop %i\n", loop->num);
891 l->looping = true;
892 break;
894 scev_finalize ();
896 loop_optimizer_finalize ();
900 if (dump_file && (dump_flags & TDF_DETAILS))
901 fprintf (dump_file, " checking previously known:");
903 better_state (&l->pure_const_state, &l->looping,
904 l->state_previously_known,
905 l->looping_previously_known);
906 if (TREE_NOTHROW (decl))
907 l->can_throw = false;
909 pop_cfun ();
910 if (dump_file)
912 if (l->looping)
913 fprintf (dump_file, "Function is locally looping.\n");
914 if (l->can_throw)
915 fprintf (dump_file, "Function is locally throwing.\n");
916 if (l->pure_const_state == IPA_CONST)
917 fprintf (dump_file, "Function is locally const.\n");
918 if (l->pure_const_state == IPA_PURE)
919 fprintf (dump_file, "Function is locally pure.\n");
920 if (l->can_free)
921 fprintf (dump_file, "Function can locally free.\n");
923 return l;
926 /* Called when a new function is inserted into the callgraph late. */
927 static void
928 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
930 if (node->get_availability () < AVAIL_INTERPOSABLE)
931 return;
932 /* There are some shared nodes, in particular the initializers on
933 static declarations. We do not need to scan them more than once
934 since all we would be interested in are the addressof
935 operations. */
936 if (node->get_availability () > AVAIL_INTERPOSABLE
937 && opt_for_fn (node->decl, flag_ipa_pure_const))
938 set_function_state (node, analyze_function (node, true));
941 /* Called when a new clone is inserted into the callgraph late. */
943 static void
944 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
945 void *data ATTRIBUTE_UNUSED)
947 if (has_function_state (src))
949 funct_state l = XNEW (struct funct_state_d);
950 gcc_assert (!has_function_state (dst));
951 memcpy (l, get_function_state (src), sizeof (*l));
952 set_function_state (dst, l);
956 /* Called when a cgraph node is removed. */
958 static void
959 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
961 if (has_function_state (node))
963 funct_state l = get_function_state (node);
964 if (l != &varying_state)
965 free (l);
966 set_function_state (node, NULL);
971 void
972 pass_ipa_pure_const::
973 register_hooks (void)
975 if (init_p)
976 return;
978 init_p = true;
980 node_removal_hook_holder =
981 symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
982 node_duplication_hook_holder =
983 symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
984 function_insertion_hook_holder =
985 symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
989 /* Analyze each function in the cgraph to see if it is locally PURE or
990 CONST. */
992 static void
993 pure_const_generate_summary (void)
995 struct cgraph_node *node;
997 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
998 pass->register_hooks ();
1000 /* Process all of the functions.
1002 We process AVAIL_INTERPOSABLE functions. We cannot use the results
1003 by default, but the info can be used at LTO with -fwhole-program or
1004 when the function got cloned and the clone is AVAILABLE. */
1006 FOR_EACH_DEFINED_FUNCTION (node)
1007 if (node->get_availability () >= AVAIL_INTERPOSABLE
1008 && opt_for_fn (node->decl, flag_ipa_pure_const))
1009 set_function_state (node, analyze_function (node, true));
1013 /* Serialize the ipa info for lto. */
1015 static void
1016 pure_const_write_summary (void)
1018 struct cgraph_node *node;
1019 struct lto_simple_output_block *ob
1020 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1021 unsigned int count = 0;
1022 lto_symtab_encoder_iterator lsei;
1023 lto_symtab_encoder_t encoder;
1025 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1027 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1028 lsei_next_function_in_partition (&lsei))
1030 node = lsei_cgraph_node (lsei);
1031 if (node->definition && has_function_state (node))
1032 count++;
1035 streamer_write_uhwi_stream (ob->main_stream, count);
1037 /* Process all of the functions. */
1038 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1039 lsei_next_function_in_partition (&lsei))
1041 node = lsei_cgraph_node (lsei);
1042 if (node->definition && has_function_state (node))
1044 struct bitpack_d bp;
1045 funct_state fs;
1046 int node_ref;
1047 lto_symtab_encoder_t encoder;
1049 fs = get_function_state (node);
1051 encoder = ob->decl_state->symtab_node_encoder;
1052 node_ref = lto_symtab_encoder_encode (encoder, node);
1053 streamer_write_uhwi_stream (ob->main_stream, node_ref);
1055 /* Note that flags will need to be read in the opposite
1056 order as we are pushing the bitflags into FLAGS. */
1057 bp = bitpack_create (ob->main_stream);
1058 bp_pack_value (&bp, fs->pure_const_state, 2);
1059 bp_pack_value (&bp, fs->state_previously_known, 2);
1060 bp_pack_value (&bp, fs->looping_previously_known, 1);
1061 bp_pack_value (&bp, fs->looping, 1);
1062 bp_pack_value (&bp, fs->can_throw, 1);
1063 bp_pack_value (&bp, fs->can_free, 1);
1064 streamer_write_bitpack (&bp);
1068 lto_destroy_simple_output_block (ob);
1072 /* Deserialize the ipa info for lto. */
1074 static void
1075 pure_const_read_summary (void)
1077 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1078 struct lto_file_decl_data *file_data;
1079 unsigned int j = 0;
1081 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1082 pass->register_hooks ();
1084 while ((file_data = file_data_vec[j++]))
1086 const char *data;
1087 size_t len;
1088 struct lto_input_block *ib
1089 = lto_create_simple_input_block (file_data,
1090 LTO_section_ipa_pure_const,
1091 &data, &len);
1092 if (ib)
1094 unsigned int i;
1095 unsigned int count = streamer_read_uhwi (ib);
1097 for (i = 0; i < count; i++)
1099 unsigned int index;
1100 struct cgraph_node *node;
1101 struct bitpack_d bp;
1102 funct_state fs;
1103 lto_symtab_encoder_t encoder;
1105 fs = XCNEW (struct funct_state_d);
1106 index = streamer_read_uhwi (ib);
1107 encoder = file_data->symtab_node_encoder;
1108 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1109 index));
1110 set_function_state (node, fs);
1112 /* Note that the flags must be read in the opposite
1113 order in which they were written (the bitflags were
1114 pushed into FLAGS). */
1115 bp = streamer_read_bitpack (ib);
1116 fs->pure_const_state
1117 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1118 fs->state_previously_known
1119 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1120 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1121 fs->looping = bp_unpack_value (&bp, 1);
1122 fs->can_throw = bp_unpack_value (&bp, 1);
1123 fs->can_free = bp_unpack_value (&bp, 1);
1124 if (dump_file)
1126 int flags = flags_from_decl_or_type (node->decl);
1127 fprintf (dump_file, "Read info for %s/%i ",
1128 node->name (),
1129 node->order);
1130 if (flags & ECF_CONST)
1131 fprintf (dump_file, " const");
1132 if (flags & ECF_PURE)
1133 fprintf (dump_file, " pure");
1134 if (flags & ECF_NOTHROW)
1135 fprintf (dump_file, " nothrow");
1136 fprintf (dump_file, "\n pure const state: %s\n",
1137 pure_const_names[fs->pure_const_state]);
1138 fprintf (dump_file, " previously known state: %s\n",
1139 pure_const_names[fs->state_previously_known]);
1140 if (fs->looping)
1141 fprintf (dump_file," function is locally looping\n");
1142 if (fs->looping_previously_known)
1143 fprintf (dump_file," function is previously known looping\n");
1144 if (fs->can_throw)
1145 fprintf (dump_file," function is locally throwing\n");
1146 if (fs->can_free)
1147 fprintf (dump_file," function can locally free\n");
1151 lto_destroy_simple_input_block (file_data,
1152 LTO_section_ipa_pure_const,
1153 ib, data, len);
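/* Edge predicate for the nothrow propagation below: edges that cannot
   throw externally are ignored when building the reduced postorder.  */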
1159 static bool
1160 ignore_edge (struct cgraph_edge *e)
1162 return (!e->can_throw_external);
1165 /* Return true if NODE is a self-recursive function.
1166 Indirectly recursive functions appear as non-trivial strongly
1167 connected components, so we need to care about self-recursion
1168 only. */
1170 static bool
1171 self_recursive_p (struct cgraph_node *node)
1173 struct cgraph_edge *e;
1174 for (e = node->callees; e; e = e->next_callee)
1175 if (e->callee->function_symbol () == node)
1176 return true;
1177 return false;
1180 /* Return true if N is a cdtor that is not const or pure. In this case we may
1181 need to remove the unreachable function if it is marked const/pure. */
1183 static bool
1184 cdtor_p (cgraph_node *n, void *)
1186 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1187 return !TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl);
1188 return false;
1191 /* Produce transitive closure over the callgraph and compute pure/const
1192 attributes. */
1194 static bool
1195 propagate_pure_const (void)
1197 struct cgraph_node *node;
1198 struct cgraph_node *w;
1199 struct cgraph_node **order =
1200 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1201 int order_pos;
1202 int i;
1203 struct ipa_dfs_info * w_info;
1204 bool remove_p = false;
1206 order_pos = ipa_reduced_postorder (order, true, false, NULL);
1207 if (dump_file)
1209 cgraph_node::dump_cgraph (dump_file);
1210 ipa_print_order (dump_file, "reduced", order, order_pos);
1213 /* Propagate the local information through the call graph to produce
1214 the global information. All the nodes within a cycle will have
1215 the same info so we collapse cycles first. Then we can do the
1216 propagation in one pass from the leaves to the roots. */
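/* For example, if a cycle contains one locally const and one locally pure
   node, worse_state collapses the whole cycle to IPA_PURE, and because the
   cycle has more than one member it is also treated as possibly looping.  */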
1217 for (i = 0; i < order_pos; i++ )
1219 enum pure_const_state_e pure_const_state = IPA_CONST;
1220 bool looping = false;
1221 int count = 0;
1222 node = order[i];
1224 if (node->alias)
1225 continue;
1227 if (dump_file && (dump_flags & TDF_DETAILS))
1228 fprintf (dump_file, "Starting cycle\n");
1230 /* Find the worst state for any node in the cycle. */
1231 w = node;
1232 while (w && pure_const_state != IPA_NEITHER)
1234 struct cgraph_edge *e;
1235 struct cgraph_edge *ie;
1236 int i;
1237 struct ipa_ref *ref = NULL;
1239 funct_state w_l = get_function_state (w);
1240 if (dump_file && (dump_flags & TDF_DETAILS))
1241 fprintf (dump_file, " Visiting %s/%i state:%s looping %i\n",
1242 w->name (),
1243 w->order,
1244 pure_const_names[w_l->pure_const_state],
1245 w_l->looping);
1247 /* First merge in function body properties. */
1248 worse_state (&pure_const_state, &looping,
1249 w_l->pure_const_state, w_l->looping);
1250 if (pure_const_state == IPA_NEITHER)
1251 break;
1253 /* For overwritable nodes we cannot assume anything. */
1254 if (w->get_availability () == AVAIL_INTERPOSABLE)
1256 worse_state (&pure_const_state, &looping,
1257 w_l->state_previously_known,
1258 w_l->looping_previously_known);
1259 if (dump_file && (dump_flags & TDF_DETAILS))
1261 fprintf (dump_file,
1262 " Overwritable. state %s looping %i\n",
1263 pure_const_names[w_l->state_previously_known],
1264 w_l->looping_previously_known);
1266 break;
1269 count++;
1271 /* We consider recursive cycles as possibly infinite.
1272 This might be relaxed since infinite recursion leads to stack
1273 overflow. */
1274 if (count > 1)
1275 looping = true;
1277 /* Now walk the edges and merge in callee properties. */
1278 for (e = w->callees; e; e = e->next_callee)
1280 enum availability avail;
1281 struct cgraph_node *y = e->callee->
1282 function_or_virtual_thunk_symbol (&avail);
1283 enum pure_const_state_e edge_state = IPA_CONST;
1284 bool edge_looping = false;
1286 if (dump_file && (dump_flags & TDF_DETAILS))
1288 fprintf (dump_file,
1289 " Call to %s/%i",
1290 e->callee->name (),
1291 e->callee->order);
1293 if (avail > AVAIL_INTERPOSABLE)
1295 funct_state y_l = get_function_state (y);
1296 if (dump_file && (dump_flags & TDF_DETAILS))
1298 fprintf (dump_file,
1299 " state:%s looping:%i\n",
1300 pure_const_names[y_l->pure_const_state],
1301 y_l->looping);
1303 if (y_l->pure_const_state > IPA_PURE
1304 && e->cannot_lead_to_return_p ())
1306 if (dump_file && (dump_flags & TDF_DETAILS))
1307 fprintf (dump_file,
1308 " Ignoring side effects"
1309 " -> pure, looping\n");
1310 edge_state = IPA_PURE;
1311 edge_looping = true;
1313 else
1315 edge_state = y_l->pure_const_state;
1316 edge_looping = y_l->looping;
1319 else if (special_builtin_state (&edge_state, &edge_looping,
1320 y->decl))
1322 else
1323 state_from_flags (&edge_state, &edge_looping,
1324 flags_from_decl_or_type (y->decl),
1325 e->cannot_lead_to_return_p ());
1327 /* Merge the results with what we already know. */
1328 better_state (&edge_state, &edge_looping,
1329 w_l->state_previously_known,
1330 w_l->looping_previously_known);
1331 worse_state (&pure_const_state, &looping,
1332 edge_state, edge_looping);
1333 if (pure_const_state == IPA_NEITHER)
1334 break;
1336 if (pure_const_state == IPA_NEITHER)
1337 break;
1339 /* Now process the indirect calls. */
1340 for (ie = w->indirect_calls; ie; ie = ie->next_callee)
1342 enum pure_const_state_e edge_state = IPA_CONST;
1343 bool edge_looping = false;
1345 if (dump_file && (dump_flags & TDF_DETAILS))
1346 fprintf (dump_file, " Indirect call");
1347 state_from_flags (&edge_state, &edge_looping,
1348 ie->indirect_info->ecf_flags,
1349 ie->cannot_lead_to_return_p ());
1350 /* Merge the results with what we already know. */
1351 better_state (&edge_state, &edge_looping,
1352 w_l->state_previously_known,
1353 w_l->looping_previously_known);
1354 worse_state (&pure_const_state, &looping,
1355 edge_state, edge_looping);
1356 if (pure_const_state == IPA_NEITHER)
1357 break;
1359 if (pure_const_state == IPA_NEITHER)
1360 break;
1362 /* And finally all loads and stores. */
1363 for (i = 0; w->iterate_reference (i, ref); i++)
1365 enum pure_const_state_e ref_state = IPA_CONST;
1366 bool ref_looping = false;
1367 switch (ref->use)
1369 case IPA_REF_LOAD:
1370 /* readonly reads are safe. */
1371 if (TREE_READONLY (ref->referred->decl))
1372 break;
1373 if (dump_file && (dump_flags & TDF_DETAILS))
1374 fprintf (dump_file, " nonreadonly global var read\n");
1375 ref_state = IPA_PURE;
1376 break;
1377 case IPA_REF_STORE:
1378 if (ref->cannot_lead_to_return ())
1379 break;
1380 ref_state = IPA_NEITHER;
1381 if (dump_file && (dump_flags & TDF_DETAILS))
1382 fprintf (dump_file, " global var write\n");
1383 break;
1384 case IPA_REF_ADDR:
1385 case IPA_REF_CHKP:
1386 break;
1387 default:
1388 gcc_unreachable ();
1390 better_state (&ref_state, &ref_looping,
1391 w_l->state_previously_known,
1392 w_l->looping_previously_known);
1393 worse_state (&pure_const_state, &looping,
1394 ref_state, ref_looping);
1395 if (pure_const_state == IPA_NEITHER)
1396 break;
1398 w_info = (struct ipa_dfs_info *) w->aux;
1399 w = w_info->next_cycle;
1401 if (dump_file && (dump_flags & TDF_DETAILS))
1402 fprintf (dump_file, "Result %s looping %i\n",
1403 pure_const_names [pure_const_state],
1404 looping);
1406 /* Find the worst state of can_free for any node in the cycle. */
1407 bool can_free = false;
1408 w = node;
1409 while (w && !can_free)
1411 struct cgraph_edge *e;
1412 funct_state w_l = get_function_state (w);
1414 if (w_l->can_free
1415 || w->get_availability () == AVAIL_INTERPOSABLE
1416 || w->indirect_calls)
1417 can_free = true;
1419 for (e = w->callees; e && !can_free; e = e->next_callee)
1421 enum availability avail;
1422 struct cgraph_node *y = e->callee->
1423 function_or_virtual_thunk_symbol (&avail);
1425 if (avail > AVAIL_INTERPOSABLE)
1426 can_free = get_function_state (y)->can_free;
1427 else
1428 can_free = true;
1430 w_info = (struct ipa_dfs_info *) w->aux;
1431 w = w_info->next_cycle;
1434 /* Copy back the region's pure_const_state which is shared by
1435 all nodes in the region. */
1436 w = node;
1437 while (w)
1439 funct_state w_l = get_function_state (w);
1440 enum pure_const_state_e this_state = pure_const_state;
1441 bool this_looping = looping;
1443 w_l->can_free = can_free;
1444 w->nonfreeing_fn = !can_free;
1445 if (!can_free && dump_file)
1446 fprintf (dump_file, "Function found not to call free: %s\n",
1447 w->name ());
1449 if (w_l->state_previously_known != IPA_NEITHER
1450 && this_state > w_l->state_previously_known)
1452 this_state = w_l->state_previously_known;
1453 this_looping |= w_l->looping_previously_known;
1455 if (!this_looping && self_recursive_p (w))
1456 this_looping = true;
1457 if (!w_l->looping_previously_known)
1458 this_looping = false;
1460 /* All nodes within a cycle share the same info. */
1461 w_l->pure_const_state = this_state;
1462 w_l->looping = this_looping;
1464 /* Inline clones share declaration with their offline copies;
1465 do not modify their declarations since the offline copy may
1466 be different. */
1467 if (!w->global.inlined_to)
1468 switch (this_state)
1470 case IPA_CONST:
1471 if (!TREE_READONLY (w->decl))
1473 warn_function_const (w->decl, !this_looping);
1474 if (dump_file)
1475 fprintf (dump_file, "Function found to be %sconst: %s\n",
1476 this_looping ? "looping " : "",
1477 w->name ());
1479 remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
1480 NULL, true);
1481 w->set_const_flag (true, this_looping);
1482 break;
1484 case IPA_PURE:
1485 if (!DECL_PURE_P (w->decl))
1487 warn_function_pure (w->decl, !this_looping);
1488 if (dump_file)
1489 fprintf (dump_file, "Function found to be %spure: %s\n",
1490 this_looping ? "looping " : "",
1491 w->name ());
1493 remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
1494 NULL, true);
1495 w->set_pure_flag (true, this_looping);
1496 break;
1498 default:
1499 break;
1501 w_info = (struct ipa_dfs_info *) w->aux;
1502 w = w_info->next_cycle;
1506 ipa_free_postorder_info ();
1507 free (order);
1508 return remove_p;
1511 /* Produce transitive closure over the callgraph and compute nothrow
1512 attributes. */
1514 static void
1515 propagate_nothrow (void)
1517 struct cgraph_node *node;
1518 struct cgraph_node *w;
1519 struct cgraph_node **order =
1520 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1521 int order_pos;
1522 int i;
1523 struct ipa_dfs_info * w_info;
1525 order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
1526 if (dump_file)
1528 cgraph_node::dump_cgraph (dump_file);
1529 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1532 /* Propagate the local information through the call graph to produce
1533 the global information. All the nodes within a cycle will have
1534 the same info so we collapse cycles first. Then we can do the
1535 propagation in one pass from the leaves to the roots. */
1536 for (i = 0; i < order_pos; i++ )
1538 bool can_throw = false;
1539 node = order[i];
1541 if (node->alias)
1542 continue;
1544 /* Find the worst state for any node in the cycle. */
1545 w = node;
1546 while (w && !can_throw)
1548 struct cgraph_edge *e, *ie;
1549 funct_state w_l = get_function_state (w);
1551 if (w_l->can_throw
1552 || w->get_availability () == AVAIL_INTERPOSABLE)
1553 can_throw = true;
1555 for (e = w->callees; e && !can_throw; e = e->next_callee)
1557 enum availability avail;
1558 struct cgraph_node *y = e->callee->
1559 function_or_virtual_thunk_symbol (&avail);
1561 if (avail > AVAIL_INTERPOSABLE)
1563 funct_state y_l = get_function_state (y);
1565 if (y_l->can_throw && !TREE_NOTHROW (w->decl)
1566 && e->can_throw_external)
1567 can_throw = true;
1569 else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
1570 can_throw = true;
1572 for (ie = w->indirect_calls; ie && !can_throw; ie = ie->next_callee)
1573 if (ie->can_throw_external)
1574 can_throw = true;
1575 w_info = (struct ipa_dfs_info *) w->aux;
1576 w = w_info->next_cycle;
1579 /* Copy back the region's nothrow state, which is shared by
1580 all nodes in the region. */
1581 w = node;
1582 while (w)
1584 funct_state w_l = get_function_state (w);
1585 if (!can_throw && !TREE_NOTHROW (w->decl))
1587 /* Inline clones share declaration with their offline copies;
1588 do not modify their declarations since the offline copy may
1589 be different. */
1590 if (!w->global.inlined_to)
1592 w->set_nothrow_flag (true);
1593 if (dump_file)
1594 fprintf (dump_file, "Function found to be nothrow: %s\n",
1595 w->name ());
1598 else if (can_throw && !TREE_NOTHROW (w->decl))
1599 w_l->can_throw = true;
1600 w_info = (struct ipa_dfs_info *) w->aux;
1601 w = w_info->next_cycle;
1605 ipa_free_postorder_info ();
1606 free (order);
1610 /* Produce the global information by performing a transitive closure
1611 on the local information that was produced by generate_summary. */
1613 unsigned int
1614 pass_ipa_pure_const::
1615 execute (function *)
1617 struct cgraph_node *node;
1618 bool remove_p;
1620 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
1621 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
1622 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
1624 /* Nothrow makes more functions not lead to a return, which improves
1625 later analysis. */
1626 propagate_nothrow ();
1627 remove_p = propagate_pure_const ();
1629 /* Cleanup. */
1630 FOR_EACH_FUNCTION (node)
1631 if (has_function_state (node))
1632 free (get_function_state (node));
1633 funct_state_vec.release ();
1634 return remove_p ? TODO_remove_functions : 0;
1637 static bool
1638 gate_pure_const (void)
1640 return flag_ipa_pure_const || in_lto_p;
1643 pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
1644 : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
1645 pure_const_generate_summary, /* generate_summary */
1646 pure_const_write_summary, /* write_summary */
1647 pure_const_read_summary, /* read_summary */
1648 NULL, /* write_optimization_summary */
1649 NULL, /* read_optimization_summary */
1650 NULL, /* stmt_fixup */
1651 0, /* function_transform_todo_flags_start */
1652 NULL, /* function_transform */
1653 NULL), /* variable_transform */
1654 init_p(false),
1655 function_insertion_hook_holder(NULL),
1656 node_duplication_hook_holder(NULL),
1657 node_removal_hook_holder(NULL)
1661 ipa_opt_pass_d *
1662 make_pass_ipa_pure_const (gcc::context *ctxt)
1664 return new pass_ipa_pure_const (ctxt);
1667 /* Return true if function should be skipped for local pure const analysis. */
1669 static bool
1670 skip_function_for_local_pure_const (struct cgraph_node *node)
1672 /* Because we do not schedule pass_fixup_cfg over the whole program after early
1673 optimizations, we must not promote functions that are called by already processed functions. */
1675 if (function_called_by_processed_nodes_p ())
1677 if (dump_file)
1678 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1679 return true;
1681 if (node->get_availability () <= AVAIL_INTERPOSABLE)
1683 if (dump_file)
1684 fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
1685 return true;
1687 return false;
1690 /* Simple local pass for pure const discovery reusing the analysis from
1691 ipa_pure_const. This pass is effective when executed together with
1692 other optimization passes in the early optimization pass queue. */
1694 namespace {
1696 const pass_data pass_data_local_pure_const =
1698 GIMPLE_PASS, /* type */
1699 "local-pure-const", /* name */
1700 OPTGROUP_NONE, /* optinfo_flags */
1701 TV_IPA_PURE_CONST, /* tv_id */
1702 0, /* properties_required */
1703 0, /* properties_provided */
1704 0, /* properties_destroyed */
1705 0, /* todo_flags_start */
1706 0, /* todo_flags_finish */
1709 class pass_local_pure_const : public gimple_opt_pass
1711 public:
1712 pass_local_pure_const (gcc::context *ctxt)
1713 : gimple_opt_pass (pass_data_local_pure_const, ctxt)
1716 /* opt_pass methods: */
1717 opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
1718 virtual bool gate (function *) { return gate_pure_const (); }
1719 virtual unsigned int execute (function *);
1721 }; // class pass_local_pure_const
1723 unsigned int
1724 pass_local_pure_const::execute (function *fun)
1726 bool changed = false;
1727 funct_state l;
1728 bool skip;
1729 struct cgraph_node *node;
1731 node = cgraph_node::get (current_function_decl);
1732 skip = skip_function_for_local_pure_const (node);
1733 if (!warn_suggest_attribute_const
1734 && !warn_suggest_attribute_pure
1735 && skip)
1736 return 0;
1738 l = analyze_function (node, false);
1740 /* Do NORETURN discovery. */
1741 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
1742 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
1744 warn_function_noreturn (fun->decl);
1745 if (dump_file)
1746 fprintf (dump_file, "Function found to be noreturn: %s\n",
1747 current_function_name ());
1749 /* Update declaration and reduce profile to executed once. */
1750 TREE_THIS_VOLATILE (current_function_decl) = 1;
1751 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
1752 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
1754 changed = true;
1757 switch (l->pure_const_state)
1759 case IPA_CONST:
1760 if (!TREE_READONLY (current_function_decl))
1762 warn_function_const (current_function_decl, !l->looping);
1763 if (!skip)
1765 node->set_const_flag (true, l->looping);
1766 changed = true;
1768 if (dump_file)
1769 fprintf (dump_file, "Function found to be %sconst: %s\n",
1770 l->looping ? "looping " : "",
1771 current_function_name ());
1773 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1774 && !l->looping)
1776 if (!skip)
1778 node->set_const_flag (true, false);
1779 changed = true;
1781 if (dump_file)
1782 fprintf (dump_file, "Function found to be non-looping: %s\n",
1783 current_function_name ());
1785 break;
1787 case IPA_PURE:
1788 if (!DECL_PURE_P (current_function_decl))
1790 if (!skip)
1792 node->set_pure_flag (true, l->looping);
1793 changed = true;
1795 warn_function_pure (current_function_decl, !l->looping);
1796 if (dump_file)
1797 fprintf (dump_file, "Function found to be %spure: %s\n",
1798 l->looping ? "looping " : "",
1799 current_function_name ());
1801 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1802 && !l->looping)
1804 if (!skip)
1806 node->set_pure_flag (true, false);
1807 changed = true;
1809 if (dump_file)
1810 fprintf (dump_file, "Function found to be non-looping: %s\n",
1811 current_function_name ());
1813 break;
1815 default:
1816 break;
1818 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1820 node->set_nothrow_flag (true);
1821 changed = true;
1822 if (dump_file)
1823 fprintf (dump_file, "Function found to be nothrow: %s\n",
1824 current_function_name ());
1826 free (l);
1827 if (changed)
1828 return execute_fixup_cfg ();
1829 else
1830 return 0;
1833 } // anon namespace
1835 gimple_opt_pass *
1836 make_pass_local_pure_const (gcc::context *ctxt)
1838 return new pass_local_pure_const (ctxt);
1841 /* Emit noreturn warnings. */
1843 namespace {
1845 const pass_data pass_data_warn_function_noreturn =
1847 GIMPLE_PASS, /* type */
1848 "*warn_function_noreturn", /* name */
1849 OPTGROUP_NONE, /* optinfo_flags */
1850 TV_NONE, /* tv_id */
1851 PROP_cfg, /* properties_required */
1852 0, /* properties_provided */
1853 0, /* properties_destroyed */
1854 0, /* todo_flags_start */
1855 0, /* todo_flags_finish */
1858 class pass_warn_function_noreturn : public gimple_opt_pass
1860 public:
1861 pass_warn_function_noreturn (gcc::context *ctxt)
1862 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
1865 /* opt_pass methods: */
1866 virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
1867 virtual unsigned int execute (function *fun)
1869 if (!TREE_THIS_VOLATILE (current_function_decl)
1870 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
1871 warn_function_noreturn (current_function_decl);
1872 return 0;
1875 }; // class pass_warn_function_noreturn
1877 } // anon namespace
1879 gimple_opt_pass *
1880 make_pass_warn_function_noreturn (gcc::context *ctxt)
1882 return new pass_warn_function_noreturn (ctxt);
1885 /* Simple local pass for nothrow discovery. This pass is effective when
1886 executed together with other optimization passes in the early
1887 optimization pass queue. */
1889 namespace {
1891 const pass_data pass_data_nothrow =
1893 GIMPLE_PASS, /* type */
1894 "nothrow", /* name */
1895 OPTGROUP_NONE, /* optinfo_flags */
1896 TV_IPA_PURE_CONST, /* tv_id */
1897 0, /* properties_required */
1898 0, /* properties_provided */
1899 0, /* properties_destroyed */
1900 0, /* todo_flags_start */
1901 0, /* todo_flags_finish */
1904 class pass_nothrow : public gimple_opt_pass
1906 public:
1907 pass_nothrow (gcc::context *ctxt)
1908 : gimple_opt_pass (pass_data_nothrow, ctxt)
1911 /* opt_pass methods: */
1912 opt_pass * clone () { return new pass_nothrow (m_ctxt); }
1913 virtual bool gate (function *) { return optimize; }
1914 virtual unsigned int execute (function *);
1916 }; // class pass_nothrow
1918 unsigned int
1919 pass_nothrow::execute (function *)
1921 struct cgraph_node *node;
1922 basic_block this_block;
1924 if (TREE_NOTHROW (current_function_decl))
1925 return 0;
1927 node = cgraph_node::get (current_function_decl);
1929 /* We run during lowering, so we cannot really use availability yet. */
1930 if (cgraph_node::get (current_function_decl)->get_availability ()
1931 <= AVAIL_INTERPOSABLE)
1933 if (dump_file)
1934 fprintf (dump_file, "Function is interposable;"
1935 " not analyzing.\n");
1936 return true;
1939 FOR_EACH_BB_FN (this_block, cfun)
1941 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
1942 !gsi_end_p (gsi);
1943 gsi_next (&gsi))
1944 if (stmt_can_throw_external (gsi_stmt (gsi)))
1946 if (is_gimple_call (gsi_stmt (gsi)))
1948 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
1949 if (callee_t && recursive_call_p (current_function_decl,
1950 callee_t))
1951 continue;
1954 if (dump_file)
1956 fprintf (dump_file, "Statement can throw: ");
1957 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
1959 return 0;
1963 node->set_nothrow_flag (true);
1964 if (dump_file)
1965 fprintf (dump_file, "Function found to be nothrow: %s\n",
1966 current_function_name ());
1967 return 0;
1970 } // anon namespace
1972 gimple_opt_pass *
1973 make_pass_nothrow (gcc::context *ctxt)
1975 return new pass_nothrow (ctxt);