gcc/ipa-pure-const.c
1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_OR_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not prone
   to this problem since they are by definition transitive.  */
/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "tm.h"
38 #include "tree.h"
39 #include "print-tree.h"
40 #include "calls.h"
41 #include "predict.h"
42 #include "vec.h"
43 #include "hashtab.h"
44 #include "hash-set.h"
45 #include "machmode.h"
46 #include "hard-reg-set.h"
47 #include "input.h"
48 #include "function.h"
49 #include "dominance.h"
50 #include "cfg.h"
51 #include "cfganal.h"
52 #include "basic-block.h"
53 #include "tree-ssa-alias.h"
54 #include "internal-fn.h"
55 #include "tree-eh.h"
56 #include "gimple-expr.h"
57 #include "is-a.h"
58 #include "gimple.h"
59 #include "gimple-iterator.h"
60 #include "gimple-walk.h"
61 #include "tree-cfg.h"
62 #include "tree-ssa-loop-niter.h"
63 #include "tree-inline.h"
64 #include "tree-pass.h"
65 #include "langhooks.h"
66 #include "hash-map.h"
67 #include "plugin-api.h"
68 #include "ipa-ref.h"
69 #include "cgraph.h"
70 #include "ipa-utils.h"
71 #include "flags.h"
72 #include "diagnostic.h"
73 #include "gimple-pretty-print.h"
74 #include "langhooks.h"
75 #include "target.h"
76 #include "lto-streamer.h"
77 #include "data-streamer.h"
78 #include "tree-streamer.h"
79 #include "cfgloop.h"
80 #include "tree-scalar-evolution.h"
81 #include "intl.h"
82 #include "opts.h"
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

const char *pure_const_names[3] = {"const", "pure", "neither"};
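/* For illustration only (hypothetical user code, not part of this file):
   the kind of function each lattice value typically corresponds to.

     int sq (int x) { return x * x; }     -- const: depends only on arguments
     int g;
     int peek (void) { return g; }        -- pure: also reads global memory
     void poke (int x) { g = x; }         -- neither: writes global memory

   analyze_function starts every function at IPA_CONST and drops the state
   as loads, stores and calls are scanned.  */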
/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user set here; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to CSE pure and const
     calls, it is not legal to have DCE get rid of the call if there
     is a possibility that the call could loop forever, since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;
};

/* State used when we know nothing about the function.  */
static struct funct_state_d varying_state
  = { IPA_NEITHER, IPA_NEITHER, true, true, true };
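/* A minimal sketch (hypothetical user code) of why LOOPING is tracked
   separately from the const/pure state:

     int spin (int x) { while (x) ; return 0; }   -- const, but may not return

   Two identical calls to spin may still be CSEd, but DCE must not delete a
   call whose result is unused, since removing it could turn a
   non-terminating program into a terminating one.  */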
122 typedef struct funct_state_d * funct_state;
124 /* The storage of the funct_state is abstracted because there is the
125 possibility that it may be desirable to move this to the cgraph
126 local info. */
128 /* Array, indexed by cgraph node uid, of function states. */
130 static vec<funct_state> funct_state_vec;
132 static bool gate_pure_const (void);
134 namespace {
const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;

  /* Holders of ipa cgraph hooks: */
  struct cgraph_node_hook_list *function_insertion_hook_holder;
  struct cgraph_2node_hook_list *node_duplication_hook_holder;
  struct cgraph_node_hook_list *node_removal_hook_holder;

}; // class pass_ipa_pure_const
170 } // anon namespace
/* Try to guess if the function body will always be visible to the
   compiler when compiling the call and whether the compiler will be
   able to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
}
/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION; this function may initialize it, and it is always returned
   by the function.  */
188 static hash_set<tree> *
189 suggest_attribute (int option, tree decl, bool known_finite,
190 hash_set<tree> *warned_about,
191 const char * attrib_name)
193 if (!option_enabled (option, &global_options))
194 return warned_about;
195 if (TREE_THIS_VOLATILE (decl)
196 || (known_finite && function_always_visible_to_compiler_p (decl)))
197 return warned_about;
199 if (!warned_about)
200 warned_about = new hash_set<tree>;
201 if (warned_about->contains (decl))
202 return warned_about;
203 warned_about->add (decl);
204 warning_at (DECL_SOURCE_LOCATION (decl),
205 option,
206 known_finite
207 ? _("function might be candidate for attribute %<%s%>")
208 : _("function might be candidate for attribute %<%s%>"
209 " if it is known to return normally"), attrib_name);
210 return warned_about;
/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about, "pure");
}

/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
			 known_finite, warned_about, "const");
}

static void
warn_function_noreturn (tree decl)
{
  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
			   true, warned_about, "noreturn");
}
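/* Illustrative example (hypothetical user code): compiling something like

     int g;
     int peek (void) { return g; }

   with -O2 -Wsuggest-attribute=pure should produce a warning of the form
   "function might be candidate for attribute 'pure'" at peek, because the
   body is discovered to be pure while the exported declaration does not say
   so.  Marking the declaration __attribute__((pure)) silences it.  */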
249 /* Return true if we have a function state for NODE. */
251 static inline bool
252 has_function_state (struct cgraph_node *node)
254 if (!funct_state_vec.exists ()
255 || funct_state_vec.length () <= (unsigned int)node->uid)
256 return false;
257 return funct_state_vec[node->uid] != NULL;
260 /* Return the function state from NODE. */
262 static inline funct_state
263 get_function_state (struct cgraph_node *node)
265 if (!funct_state_vec.exists ()
266 || funct_state_vec.length () <= (unsigned int)node->uid
267 || !funct_state_vec[node->uid])
268 /* We might want to put correct previously_known state into varying. */
269 return &varying_state;
270 return funct_state_vec[node->uid];
273 /* Set the function state S for NODE. */
275 static inline void
276 set_function_state (struct cgraph_node *node, funct_state s)
278 if (!funct_state_vec.exists ()
279 || funct_state_vec.length () <= (unsigned int)node->uid)
280 funct_state_vec.safe_grow_cleared (node->uid + 1);
281 funct_state_vec[node->uid] = s;
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */
287 static inline void
288 check_decl (funct_state local,
289 tree t, bool checking_write, bool ipa)
291 /* Do not want to do anything with volatile except mark any
292 function that uses one to be not const or pure. */
293 if (TREE_THIS_VOLATILE (t))
295 local->pure_const_state = IPA_NEITHER;
296 if (dump_file)
297 fprintf (dump_file, " Volatile operand is not const/pure");
298 return;
301 /* Do not care about a local automatic that is not static. */
302 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
303 return;
  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
307 if (DECL_PRESERVE_P (t))
309 local->pure_const_state = IPA_NEITHER;
310 if (dump_file)
311 fprintf (dump_file, " Used static/global variable is not const/pure\n");
312 return;
315 /* In IPA mode we are not interested in checking actual loads and stores;
316 they will be processed at propagation time using ipa_ref. */
317 if (ipa)
318 return;
320 /* Since we have dealt with the locals and params cases above, if we
321 are CHECKING_WRITE, this cannot be a pure or constant
322 function. */
323 if (checking_write)
325 local->pure_const_state = IPA_NEITHER;
326 if (dump_file)
327 fprintf (dump_file, " static/global memory write is not const/pure\n");
328 return;
331 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
333 /* Readonly reads are safe. */
334 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
335 return; /* Read of a constant, do not change the function state. */
336 else
338 if (dump_file)
339 fprintf (dump_file, " global memory read is not const\n");
340 /* Just a regular read. */
341 if (local->pure_const_state == IPA_CONST)
342 local->pure_const_state = IPA_PURE;
345 else
347 /* Compilation level statics can be read if they are readonly
348 variables. */
349 if (TREE_READONLY (t))
350 return;
352 if (dump_file)
353 fprintf (dump_file, " static memory read is not const\n");
354 /* Just a regular read. */
355 if (local->pure_const_state == IPA_CONST)
356 local->pure_const_state = IPA_PURE;
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */
364 static inline void
365 check_op (funct_state local, tree t, bool checking_write)
367 t = get_base_address (t);
368 if (t && TREE_THIS_VOLATILE (t))
370 local->pure_const_state = IPA_NEITHER;
371 if (dump_file)
372 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
373 return;
375 else if (t
376 && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
377 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
378 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
380 if (dump_file)
381 fprintf (dump_file, " Indirect ref to local memory is OK\n");
382 return;
384 else if (checking_write)
386 local->pure_const_state = IPA_NEITHER;
387 if (dump_file)
388 fprintf (dump_file, " Indirect ref write is not const/pure\n");
389 return;
391 else
393 if (dump_file)
394 fprintf (dump_file, " Indirect ref read is not const\n");
395 if (local->pure_const_state == IPA_CONST)
396 local->pure_const_state = IPA_PURE;
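/* Illustrative examples (hypothetical user code) of how check_op above
   classifies indirect references:

     int deref_v (volatile int *p) { return *p; }   -- neither: volatile ref
     int deref (int *p) { return *p; }              -- pure: P may point to
						       global memory

   A reference through a pointer that provably cannot reach global memory is
   ignored, while a write through a possibly-global pointer drops the state
   to IPA_NEITHER.  */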
/* Compute the state based on ECF FLAGS and store it to STATE and LOOPING.  */
402 static void
403 state_from_flags (enum pure_const_state_e *state, bool *looping,
404 int flags, bool cannot_lead_to_return)
406 *looping = false;
407 if (flags & ECF_LOOPING_CONST_OR_PURE)
409 *looping = true;
410 if (dump_file && (dump_flags & TDF_DETAILS))
411 fprintf (dump_file, " looping");
413 if (flags & ECF_CONST)
415 *state = IPA_CONST;
416 if (dump_file && (dump_flags & TDF_DETAILS))
417 fprintf (dump_file, " const\n");
419 else if (flags & ECF_PURE)
421 *state = IPA_PURE;
422 if (dump_file && (dump_flags & TDF_DETAILS))
423 fprintf (dump_file, " pure\n");
425 else if (cannot_lead_to_return)
427 *state = IPA_PURE;
428 *looping = true;
429 if (dump_file && (dump_flags & TDF_DETAILS))
430 fprintf (dump_file, " ignoring side effects->pure looping\n");
432 else
434 if (dump_file && (dump_flags & TDF_DETAILS))
435 fprintf (dump_file, " neither\n");
436 *state = IPA_NEITHER;
437 *looping = true;
/* Merge STATE and STATE2 and LOOPING and LOOPING2, storing the better
   of the two variants into STATE and LOOPING.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */
446 static inline void
447 better_state (enum pure_const_state_e *state, bool *looping,
448 enum pure_const_state_e state2, bool looping2)
450 if (state2 < *state)
452 if (*state == IPA_NEITHER)
453 *looping = looping2;
454 else
455 *looping = MIN (*looping, looping2);
456 *state = state2;
458 else if (state2 != IPA_NEITHER)
459 *looping = MIN (*looping, looping2);
/* Merge STATE and STATE2 and LOOPING and LOOPING2, storing the worse
   of the two variants into STATE and LOOPING.  */
465 static inline void
466 worse_state (enum pure_const_state_e *state, bool *looping,
467 enum pure_const_state_e state2, bool looping2)
469 *state = MAX (*state, state2);
470 *looping = MAX (*looping, looping2);
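/* A small usage sketch of the two merge directions (values as defined in
   this file; the calls below are hypothetical):

     enum pure_const_state_e s = IPA_CONST;
     bool l = false;
     worse_state (&s, &l, IPA_PURE, true);     -- s == IPA_PURE,  l == true
     better_state (&s, &l, IPA_CONST, false);  -- s == IPA_CONST, l == false

   worse_state moves toward IPA_NEITHER and is used when merging in effects
   that a body or callee actually has; better_state moves toward IPA_CONST
   and is used when merging in what was previously known from the
   declaration.  */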
/* Recognize special cases of builtins that are by themselves not pure or const
   but a function using them is.  */
475 static bool
476 special_builtin_state (enum pure_const_state_e *state, bool *looping,
477 tree callee)
479 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
480 switch (DECL_FUNCTION_CODE (callee))
482 case BUILT_IN_RETURN:
483 case BUILT_IN_UNREACHABLE:
484 case BUILT_IN_ALLOCA:
485 case BUILT_IN_ALLOCA_WITH_ALIGN:
486 case BUILT_IN_STACK_SAVE:
487 case BUILT_IN_STACK_RESTORE:
488 case BUILT_IN_EH_POINTER:
489 case BUILT_IN_EH_FILTER:
490 case BUILT_IN_UNWIND_RESUME:
491 case BUILT_IN_CXA_END_CLEANUP:
492 case BUILT_IN_EH_COPY_VALUES:
493 case BUILT_IN_FRAME_ADDRESS:
494 case BUILT_IN_APPLY:
495 case BUILT_IN_APPLY_ARGS:
496 *looping = false;
497 *state = IPA_CONST;
498 return true;
499 case BUILT_IN_PREFETCH:
500 *looping = true;
501 *state = IPA_CONST;
502 return true;
503 default:
504 break;
506 return false;
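/* Illustrative example (hypothetical user code) of why these builtins get
   special treatment:

     int f (int n)
     {
       char *p = __builtin_alloca (n);
       return p != 0;
     }

   __builtin_alloca by itself is neither const nor pure, but it only affects
   the caller's own frame, the idea being that a function using it can still
   end up const.  __builtin_prefetch is likewise treated as const but marked
   looping, which (per the LOOPING comment above) keeps such calls from being
   deleted as dead.  */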
/* Check the parameters of a function call CALL to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also affect the purity.  CALL is the call
   statement for the entire call expression.  */
516 static void
517 check_call (funct_state local, gimple call, bool ipa)
519 int flags = gimple_call_flags (call);
520 tree callee_t = gimple_call_fndecl (call);
521 bool possibly_throws = stmt_could_throw_p (call);
522 bool possibly_throws_externally = (possibly_throws
523 && stmt_can_throw_external (call));
525 if (possibly_throws)
527 unsigned int i;
528 for (i = 0; i < gimple_num_ops (call); i++)
529 if (gimple_op (call, i)
530 && tree_could_throw_p (gimple_op (call, i)))
532 if (possibly_throws && cfun->can_throw_non_call_exceptions)
534 if (dump_file)
535 fprintf (dump_file, " operand can throw; looping\n");
536 local->looping = true;
538 if (possibly_throws_externally)
540 if (dump_file)
541 fprintf (dump_file, " operand can throw externally\n");
542 local->can_throw = true;
547 /* The const and pure flags are set by a variety of places in the
548 compiler (including here). If someone has already set the flags
549 for the callee, (such as for some of the builtins) we will use
550 them, otherwise we will compute our own information.
552 Const and pure functions have less clobber effects than other
553 functions so we process these first. Otherwise if it is a call
554 outside the compilation unit or an indirect call we punt. This
555 leaves local calls which will be processed by following the call
556 graph. */
557 if (callee_t)
559 enum pure_const_state_e call_state;
560 bool call_looping;
562 if (special_builtin_state (&call_state, &call_looping, callee_t))
564 worse_state (&local->pure_const_state, &local->looping,
565 call_state, call_looping);
566 return;
568 /* When bad things happen to bad functions, they cannot be const
569 or pure. */
570 if (setjmp_call_p (callee_t))
572 if (dump_file)
573 fprintf (dump_file, " setjmp is not const/pure\n");
574 local->looping = true;
575 local->pure_const_state = IPA_NEITHER;
578 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
579 switch (DECL_FUNCTION_CODE (callee_t))
581 case BUILT_IN_LONGJMP:
582 case BUILT_IN_NONLOCAL_GOTO:
583 if (dump_file)
584 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
585 local->pure_const_state = IPA_NEITHER;
586 local->looping = true;
587 break;
588 default:
589 break;
593 /* When not in IPA mode, we can still handle self recursion. */
594 if (!ipa && callee_t
595 && recursive_call_p (current_function_decl, callee_t))
597 if (dump_file)
598 fprintf (dump_file, " Recursive call can loop.\n");
599 local->looping = true;
  /* Either the callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is a builtin) and process solely on the basis
     of those bits.  */
605 else if (!ipa)
607 enum pure_const_state_e call_state;
608 bool call_looping;
609 if (possibly_throws && cfun->can_throw_non_call_exceptions)
611 if (dump_file)
612 fprintf (dump_file, " can throw; looping\n");
613 local->looping = true;
615 if (possibly_throws_externally)
617 if (dump_file)
619 fprintf (dump_file, " can throw externally to lp %i\n",
620 lookup_stmt_eh_lp (call));
621 if (callee_t)
622 fprintf (dump_file, " callee:%s\n",
623 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
625 local->can_throw = true;
627 if (dump_file && (dump_flags & TDF_DETAILS))
628 fprintf (dump_file, " checking flags for call:");
629 state_from_flags (&call_state, &call_looping, flags,
630 ((flags & (ECF_NORETURN | ECF_NOTHROW))
631 == (ECF_NORETURN | ECF_NOTHROW))
632 || (!flag_exceptions && (flags & ECF_NORETURN)));
633 worse_state (&local->pure_const_state, &local->looping,
634 call_state, call_looping);
636 /* Direct functions calls are handled by IPA propagation. */
/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}
/* Look at the statement pointed to by GSIP and figure out what
   interesting side effects it has.  */
689 static void
690 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
692 gimple stmt = gsi_stmt (*gsip);
694 if (is_gimple_debug (stmt))
695 return;
697 if (dump_file)
699 fprintf (dump_file, " scanning: ");
700 print_gimple_stmt (dump_file, stmt, 0, 0);
703 if (gimple_has_volatile_ops (stmt)
704 && !gimple_clobber_p (stmt))
706 local->pure_const_state = IPA_NEITHER;
707 if (dump_file)
708 fprintf (dump_file, " Volatile stmt is not const/pure\n");
711 /* Look for loads and stores. */
712 walk_stmt_load_store_ops (stmt, local,
713 ipa ? check_ipa_load : check_load,
714 ipa ? check_ipa_store : check_store);
716 if (gimple_code (stmt) != GIMPLE_CALL
717 && stmt_could_throw_p (stmt))
719 if (cfun->can_throw_non_call_exceptions)
721 if (dump_file)
722 fprintf (dump_file, " can throw; looping\n");
723 local->looping = true;
725 if (stmt_can_throw_external (stmt))
727 if (dump_file)
728 fprintf (dump_file, " can throw externally\n");
729 local->can_throw = true;
731 else
732 if (dump_file)
733 fprintf (dump_file, " can throw\n");
735 switch (gimple_code (stmt))
737 case GIMPLE_CALL:
738 check_call (local, stmt, ipa);
739 break;
740 case GIMPLE_LABEL:
741 if (DECL_NONLOCAL (gimple_label_label (stmt)))
742 /* Target of long jump. */
744 if (dump_file)
745 fprintf (dump_file, " nonlocal label is not const/pure\n");
746 local->pure_const_state = IPA_NEITHER;
748 break;
749 case GIMPLE_ASM:
750 if (gimple_asm_clobbers_memory_p (stmt))
752 if (dump_file)
753 fprintf (dump_file, " memory asm clobber is not const/pure\n");
754 /* Abandon all hope, ye who enter here. */
755 local->pure_const_state = IPA_NEITHER;
757 if (gimple_asm_volatile_p (stmt))
759 if (dump_file)
760 fprintf (dump_file, " volatile is not const/pure\n");
761 /* Abandon all hope, ye who enter here. */
762 local->pure_const_state = IPA_NEITHER;
763 local->looping = true;
765 return;
766 default:
767 break;
772 /* This is the main routine for finding the reference patterns for
773 global variables within a function FN. */
775 static funct_state
776 analyze_function (struct cgraph_node *fn, bool ipa)
778 tree decl = fn->decl;
779 funct_state l;
780 basic_block this_block;
782 l = XCNEW (struct funct_state_d);
783 l->pure_const_state = IPA_CONST;
784 l->state_previously_known = IPA_NEITHER;
785 l->looping_previously_known = true;
786 l->looping = false;
787 l->can_throw = false;
788 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
789 flags_from_decl_or_type (fn->decl),
790 fn->cannot_return_p ());
792 if (fn->thunk.thunk_p || fn->alias)
794 /* Thunk gets propagated through, so nothing interesting happens. */
795 gcc_assert (ipa);
796 return l;
799 if (dump_file)
801 fprintf (dump_file, "\n\n local analysis of %s\n ",
802 fn->name ());
805 push_cfun (DECL_STRUCT_FUNCTION (decl));
807 FOR_EACH_BB_FN (this_block, cfun)
809 gimple_stmt_iterator gsi;
810 struct walk_stmt_info wi;
812 memset (&wi, 0, sizeof (wi));
813 for (gsi = gsi_start_bb (this_block);
814 !gsi_end_p (gsi);
815 gsi_next (&gsi))
817 check_stmt (&gsi, l, ipa);
818 if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
819 goto end;
823 end:
824 if (l->pure_const_state != IPA_NEITHER)
      /* Const functions cannot have back edges (an
         indication of a possible infinite loop side
         effect).  */
829 if (mark_dfs_back_edges ())
          /* Preheaders are needed for SCEV to work.
             Simple latches and recorded exits improve chances that the loop
             will be proven to be finite in testcases such as loop-15.c
             and loop-24.c.  */
835 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
836 | LOOPS_HAVE_SIMPLE_LATCHES
837 | LOOPS_HAVE_RECORDED_EXITS);
838 if (dump_file && (dump_flags & TDF_DETAILS))
839 flow_loops_dump (dump_file, NULL, 0);
840 if (mark_irreducible_loops ())
842 if (dump_file)
843 fprintf (dump_file, " has irreducible loops\n");
844 l->looping = true;
846 else
848 struct loop *loop;
849 scev_initialize ();
850 FOR_EACH_LOOP (loop, 0)
851 if (!finite_loop_p (loop))
853 if (dump_file)
854 fprintf (dump_file, " can not prove finiteness of "
855 "loop %i\n", loop->num);
                  l->looping = true;
857 break;
859 scev_finalize ();
861 loop_optimizer_finalize ();
865 if (dump_file && (dump_flags & TDF_DETAILS))
866 fprintf (dump_file, " checking previously known:");
868 better_state (&l->pure_const_state, &l->looping,
869 l->state_previously_known,
870 l->looping_previously_known);
871 if (TREE_NOTHROW (decl))
872 l->can_throw = false;
874 pop_cfun ();
875 if (dump_file)
877 if (l->looping)
878 fprintf (dump_file, "Function is locally looping.\n");
879 if (l->can_throw)
880 fprintf (dump_file, "Function is locally throwing.\n");
881 if (l->pure_const_state == IPA_CONST)
882 fprintf (dump_file, "Function is locally const.\n");
883 if (l->pure_const_state == IPA_PURE)
884 fprintf (dump_file, "Function is locally pure.\n");
886 return l;
/* Called when a new function is inserted into the callgraph late.  */
890 static void
891 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
893 if (node->get_availability () < AVAIL_INTERPOSABLE)
894 return;
895 /* There are some shared nodes, in particular the initializers on
896 static declarations. We do not need to scan them more than once
897 since all we would be interested in are the addressof
898 operations. */
899 if (node->get_availability () > AVAIL_INTERPOSABLE)
900 set_function_state (node, analyze_function (node, true));
/* Called when a new clone is inserted into the callgraph late.  */
905 static void
906 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
907 void *data ATTRIBUTE_UNUSED)
909 if (has_function_state (src))
911 funct_state l = XNEW (struct funct_state_d);
912 gcc_assert (!has_function_state (dst));
913 memcpy (l, get_function_state (src), sizeof (*l));
914 set_function_state (dst, l);
/* Called when a node is removed from the callgraph.  */
920 static void
921 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
923 if (has_function_state (node))
925 funct_state l = get_function_state (node);
926 if (l != &varying_state)
927 free (l);
928 set_function_state (node, NULL);
933 void
934 pass_ipa_pure_const::
935 register_hooks (void)
937 if (init_p)
938 return;
940 init_p = true;
942 node_removal_hook_holder =
943 symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
944 node_duplication_hook_holder =
945 symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
946 function_insertion_hook_holder =
947 symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
951 /* Analyze each function in the cgraph to see if it is locally PURE or
952 CONST. */
954 static void
955 pure_const_generate_summary (void)
957 struct cgraph_node *node;
959 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
960 pass->register_hooks ();
  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when the function gets cloned and the clone is AVAILABLE.  */
968 FOR_EACH_DEFINED_FUNCTION (node)
969 if (node->get_availability () >= AVAIL_INTERPOSABLE)
970 set_function_state (node, analyze_function (node, true));
974 /* Serialize the ipa info for lto. */
976 static void
977 pure_const_write_summary (void)
979 struct cgraph_node *node;
980 struct lto_simple_output_block *ob
981 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
982 unsigned int count = 0;
983 lto_symtab_encoder_iterator lsei;
984 lto_symtab_encoder_t encoder;
986 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
988 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
989 lsei_next_function_in_partition (&lsei))
991 node = lsei_cgraph_node (lsei);
992 if (node->definition && has_function_state (node))
993 count++;
996 streamer_write_uhwi_stream (ob->main_stream, count);
998 /* Process all of the functions. */
999 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1000 lsei_next_function_in_partition (&lsei))
1002 node = lsei_cgraph_node (lsei);
1003 if (node->definition && has_function_state (node))
1005 struct bitpack_d bp;
1006 funct_state fs;
1007 int node_ref;
1008 lto_symtab_encoder_t encoder;
1010 fs = get_function_state (node);
1012 encoder = ob->decl_state->symtab_node_encoder;
1013 node_ref = lto_symtab_encoder_encode (encoder, node);
1014 streamer_write_uhwi_stream (ob->main_stream, node_ref);
1016 /* Note that flags will need to be read in the opposite
1017 order as we are pushing the bitflags into FLAGS. */
1018 bp = bitpack_create (ob->main_stream);
1019 bp_pack_value (&bp, fs->pure_const_state, 2);
1020 bp_pack_value (&bp, fs->state_previously_known, 2);
1021 bp_pack_value (&bp, fs->looping_previously_known, 1);
1022 bp_pack_value (&bp, fs->looping, 1);
1023 bp_pack_value (&bp, fs->can_throw, 1);
1024 streamer_write_bitpack (&bp);
1028 lto_destroy_simple_output_block (ob);
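/* For reference, each node written above contributes its symtab encoder
   reference followed by a 7-bit bitpack: pure_const_state (2 bits),
   state_previously_known (2), looping_previously_known (1), looping (1)
   and can_throw (1).  pure_const_read_summary below unpacks the same
   fields with matching widths.  */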
1032 /* Deserialize the ipa info for lto. */
1034 static void
1035 pure_const_read_summary (void)
1037 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1038 struct lto_file_decl_data *file_data;
1039 unsigned int j = 0;
1041 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1042 pass->register_hooks ();
1044 while ((file_data = file_data_vec[j++]))
1046 const char *data;
1047 size_t len;
1048 struct lto_input_block *ib
1049 = lto_create_simple_input_block (file_data,
1050 LTO_section_ipa_pure_const,
1051 &data, &len);
1052 if (ib)
1054 unsigned int i;
1055 unsigned int count = streamer_read_uhwi (ib);
1057 for (i = 0; i < count; i++)
1059 unsigned int index;
1060 struct cgraph_node *node;
1061 struct bitpack_d bp;
1062 funct_state fs;
1063 lto_symtab_encoder_t encoder;
1065 fs = XCNEW (struct funct_state_d);
1066 index = streamer_read_uhwi (ib);
1067 encoder = file_data->symtab_node_encoder;
1068 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1069 index));
1070 set_function_state (node, fs);
1072 /* Note that the flags must be read in the opposite
1073 order in which they were written (the bitflags were
1074 pushed into FLAGS). */
1075 bp = streamer_read_bitpack (ib);
1076 fs->pure_const_state
1077 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1078 fs->state_previously_known
1079 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1080 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1081 fs->looping = bp_unpack_value (&bp, 1);
1082 fs->can_throw = bp_unpack_value (&bp, 1);
1083 if (dump_file)
1085 int flags = flags_from_decl_or_type (node->decl);
1086 fprintf (dump_file, "Read info for %s/%i ",
1087 node->name (),
1088 node->order);
1089 if (flags & ECF_CONST)
1090 fprintf (dump_file, " const");
1091 if (flags & ECF_PURE)
1092 fprintf (dump_file, " pure");
1093 if (flags & ECF_NOTHROW)
1094 fprintf (dump_file, " nothrow");
1095 fprintf (dump_file, "\n pure const state: %s\n",
1096 pure_const_names[fs->pure_const_state]);
	      fprintf (dump_file, " previously known state: %s\n",
		       pure_const_names[fs->state_previously_known]);
1099 if (fs->looping)
1100 fprintf (dump_file," function is locally looping\n");
1101 if (fs->looping_previously_known)
1102 fprintf (dump_file," function is previously known looping\n");
1103 if (fs->can_throw)
1104 fprintf (dump_file," function is locally throwing\n");
1108 lto_destroy_simple_input_block (file_data,
1109 LTO_section_ipa_pure_const,
1110 ib, data, len);
static bool
ignore_edge (struct cgraph_edge *e)
{
  return (!e->can_throw_external);
}
/* Return true if NODE is a self-recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}
1137 /* Produce transitive closure over the callgraph and compute pure/const
1138 attributes. */
1140 static void
1141 propagate_pure_const (void)
1143 struct cgraph_node *node;
1144 struct cgraph_node *w;
1145 struct cgraph_node **order =
1146 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1147 int order_pos;
1148 int i;
1149 struct ipa_dfs_info * w_info;
1151 order_pos = ipa_reduced_postorder (order, true, false, NULL);
1152 if (dump_file)
1154 cgraph_node::dump_cgraph (dump_file);
1155 ipa_print_order (dump_file, "reduced", order, order_pos);
1158 /* Propagate the local information through the call graph to produce
1159 the global information. All the nodes within a cycle will have
1160 the same info so we collapse cycles first. Then we can do the
1161 propagation in one pass from the leaves to the roots. */
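  /* An illustrative example (hypothetical user code):

	 int g;
	 static int leaf (void) { return g; }         -- locally pure
	 static int mid (void)  { return leaf (); }   -- calls only pure code
	 int root (void) { return mid () + mid (); }  -- becomes pure as well

     Each node's final state is the worse_state of its own body and of all of
     its callees, so "pure" propagates from leaf up to root; nodes in a cycle
     (mutual recursion) are collapsed first and share one result.  */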
1162 for (i = 0; i < order_pos; i++ )
1164 enum pure_const_state_e pure_const_state = IPA_CONST;
1165 bool looping = false;
1166 int count = 0;
1167 node = order[i];
1169 if (node->alias)
1170 continue;
1172 if (dump_file && (dump_flags & TDF_DETAILS))
1173 fprintf (dump_file, "Starting cycle\n");
1175 /* Find the worst state for any node in the cycle. */
1176 w = node;
1177 while (w && pure_const_state != IPA_NEITHER)
1179 struct cgraph_edge *e;
1180 struct cgraph_edge *ie;
1181 int i;
1182 struct ipa_ref *ref = NULL;
1184 funct_state w_l = get_function_state (w);
1185 if (dump_file && (dump_flags & TDF_DETAILS))
1186 fprintf (dump_file, " Visiting %s/%i state:%s looping %i\n",
1187 w->name (),
1188 w->order,
1189 pure_const_names[w_l->pure_const_state],
1190 w_l->looping);
1192 /* First merge in function body properties. */
1193 worse_state (&pure_const_state, &looping,
1194 w_l->pure_const_state, w_l->looping);
1195 if (pure_const_state == IPA_NEITHER)
1196 break;
1198 /* For overwritable nodes we can not assume anything. */
1199 if (w->get_availability () == AVAIL_INTERPOSABLE)
1201 worse_state (&pure_const_state, &looping,
1202 w_l->state_previously_known,
1203 w_l->looping_previously_known);
1204 if (dump_file && (dump_flags & TDF_DETAILS))
1206 fprintf (dump_file,
1207 " Overwritable. state %s looping %i\n",
1208 pure_const_names[w_l->state_previously_known],
1209 w_l->looping_previously_known);
1211 break;
1214 count++;
1216 /* We consider recursive cycles as possibly infinite.
1217 This might be relaxed since infinite recursion leads to stack
1218 overflow. */
1219 if (count > 1)
1220 looping = true;
1222 /* Now walk the edges and merge in callee properties. */
1223 for (e = w->callees; e; e = e->next_callee)
1225 enum availability avail;
1226 struct cgraph_node *y = e->callee->function_symbol (&avail);
1227 enum pure_const_state_e edge_state = IPA_CONST;
1228 bool edge_looping = false;
1230 if (dump_file && (dump_flags & TDF_DETAILS))
1232 fprintf (dump_file,
1233 " Call to %s/%i",
1234 e->callee->name (),
1235 e->callee->order);
1237 if (avail > AVAIL_INTERPOSABLE)
1239 funct_state y_l = get_function_state (y);
1240 if (dump_file && (dump_flags & TDF_DETAILS))
1242 fprintf (dump_file,
1243 " state:%s looping:%i\n",
1244 pure_const_names[y_l->pure_const_state],
1245 y_l->looping);
1247 if (y_l->pure_const_state > IPA_PURE
1248 && e->cannot_lead_to_return_p ())
1250 if (dump_file && (dump_flags & TDF_DETAILS))
1251 fprintf (dump_file,
1252 " Ignoring side effects"
1253 " -> pure, looping\n");
1254 edge_state = IPA_PURE;
1255 edge_looping = true;
1257 else
1259 edge_state = y_l->pure_const_state;
1260 edge_looping = y_l->looping;
1263 else if (special_builtin_state (&edge_state, &edge_looping,
1264 y->decl))
1266 else
1267 state_from_flags (&edge_state, &edge_looping,
1268 flags_from_decl_or_type (y->decl),
1269 e->cannot_lead_to_return_p ());
1271 /* Merge the results with what we already know. */
1272 better_state (&edge_state, &edge_looping,
1273 w_l->state_previously_known,
1274 w_l->looping_previously_known);
1275 worse_state (&pure_const_state, &looping,
1276 edge_state, edge_looping);
1277 if (pure_const_state == IPA_NEITHER)
1278 break;
1280 if (pure_const_state == IPA_NEITHER)
1281 break;
1283 /* Now process the indirect call. */
1284 for (ie = w->indirect_calls; ie; ie = ie->next_callee)
1286 enum pure_const_state_e edge_state = IPA_CONST;
1287 bool edge_looping = false;
1289 if (dump_file && (dump_flags & TDF_DETAILS))
1290 fprintf (dump_file, " Indirect call");
1291 state_from_flags (&edge_state, &edge_looping,
1292 ie->indirect_info->ecf_flags,
1293 ie->cannot_lead_to_return_p ());
1294 /* Merge the results with what we already know. */
1295 better_state (&edge_state, &edge_looping,
1296 w_l->state_previously_known,
1297 w_l->looping_previously_known);
1298 worse_state (&pure_const_state, &looping,
1299 edge_state, edge_looping);
1300 if (pure_const_state == IPA_NEITHER)
1301 break;
1303 if (pure_const_state == IPA_NEITHER)
1304 break;
1306 /* And finally all loads and stores. */
1307 for (i = 0; w->iterate_reference (i, ref); i++)
1309 enum pure_const_state_e ref_state = IPA_CONST;
1310 bool ref_looping = false;
1311 switch (ref->use)
1313 case IPA_REF_LOAD:
1314 /* readonly reads are safe. */
1315 if (TREE_READONLY (ref->referred->decl))
1316 break;
1317 if (dump_file && (dump_flags & TDF_DETAILS))
1318 fprintf (dump_file, " nonreadonly global var read\n");
1319 ref_state = IPA_PURE;
1320 break;
1321 case IPA_REF_STORE:
1322 if (ref->cannot_lead_to_return ())
1323 break;
1324 ref_state = IPA_NEITHER;
1325 if (dump_file && (dump_flags & TDF_DETAILS))
1326 fprintf (dump_file, " global var write\n");
1327 break;
1328 case IPA_REF_ADDR:
1329 case IPA_REF_CHKP:
1330 break;
1331 default:
1332 gcc_unreachable ();
1334 better_state (&ref_state, &ref_looping,
1335 w_l->state_previously_known,
1336 w_l->looping_previously_known);
1337 worse_state (&pure_const_state, &looping,
1338 ref_state, ref_looping);
1339 if (pure_const_state == IPA_NEITHER)
1340 break;
1342 w_info = (struct ipa_dfs_info *) w->aux;
1343 w = w_info->next_cycle;
1345 if (dump_file && (dump_flags & TDF_DETAILS))
1346 fprintf (dump_file, "Result %s looping %i\n",
1347 pure_const_names [pure_const_state],
1348 looping);
1350 /* Copy back the region's pure_const_state which is shared by
1351 all nodes in the region. */
1352 w = node;
1353 while (w)
1355 funct_state w_l = get_function_state (w);
1356 enum pure_const_state_e this_state = pure_const_state;
1357 bool this_looping = looping;
1359 if (w_l->state_previously_known != IPA_NEITHER
1360 && this_state > w_l->state_previously_known)
1362 this_state = w_l->state_previously_known;
1363 this_looping |= w_l->looping_previously_known;
1365 if (!this_looping && self_recursive_p (w))
1366 this_looping = true;
1367 if (!w_l->looping_previously_known)
1368 this_looping = false;
1370 /* All nodes within a cycle share the same info. */
1371 w_l->pure_const_state = this_state;
1372 w_l->looping = this_looping;
1374 /* Inline clones share declaration with their offline copies;
1375 do not modify their declarations since the offline copy may
1376 be different. */
1377 if (!w->global.inlined_to)
1378 switch (this_state)
1380 case IPA_CONST:
1381 if (!TREE_READONLY (w->decl))
1383 warn_function_const (w->decl, !this_looping);
1384 if (dump_file)
1385 fprintf (dump_file, "Function found to be %sconst: %s\n",
1386 this_looping ? "looping " : "",
1387 w->name ());
1389 w->set_const_flag (true, this_looping);
1390 break;
1392 case IPA_PURE:
1393 if (!DECL_PURE_P (w->decl))
1395 warn_function_pure (w->decl, !this_looping);
1396 if (dump_file)
1397 fprintf (dump_file, "Function found to be %spure: %s\n",
1398 this_looping ? "looping " : "",
1399 w->name ());
1401 w->set_pure_flag (true, this_looping);
1402 break;
1404 default:
1405 break;
1407 w_info = (struct ipa_dfs_info *) w->aux;
1408 w = w_info->next_cycle;
1412 ipa_free_postorder_info ();
1413 free (order);
1416 /* Produce transitive closure over the callgraph and compute nothrow
1417 attributes. */
1419 static void
1420 propagate_nothrow (void)
1422 struct cgraph_node *node;
1423 struct cgraph_node *w;
1424 struct cgraph_node **order =
1425 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1426 int order_pos;
1427 int i;
1428 struct ipa_dfs_info * w_info;
1430 order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
1431 if (dump_file)
1433 cgraph_node::dump_cgraph (dump_file);
1434 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1437 /* Propagate the local information through the call graph to produce
1438 the global information. All the nodes within a cycle will have
1439 the same info so we collapse cycles first. Then we can do the
1440 propagation in one pass from the leaves to the roots. */
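  /* An illustrative example (hypothetical C++ input compiled with
     exceptions enabled):

	 static int leaf (int x) { return x + 1; }   -- body cannot throw
	 int wrap (int x) { return leaf (x); }       -- only calls leaf

     wrap ends up TREE_NOTHROW even though it was never declared noexcept,
     because neither its own body nor any reachable callee can throw
     externally; an interposable node or a throwing indirect call keeps
     can_throw set for the whole cycle.  */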
1441 for (i = 0; i < order_pos; i++ )
1443 bool can_throw = false;
1444 node = order[i];
1446 if (node->alias)
1447 continue;
1449 /* Find the worst state for any node in the cycle. */
1450 w = node;
1451 while (w && !can_throw)
1453 struct cgraph_edge *e, *ie;
1454 funct_state w_l = get_function_state (w);
1456 if (w_l->can_throw
1457 || w->get_availability () == AVAIL_INTERPOSABLE)
1458 can_throw = true;
1460 for (e = w->callees; e && !can_throw; e = e->next_callee)
1462 enum availability avail;
1463 struct cgraph_node *y = e->callee->function_symbol (&avail);
1465 if (avail > AVAIL_INTERPOSABLE)
1467 funct_state y_l = get_function_state (y);
1469 if (y_l->can_throw && !TREE_NOTHROW (w->decl)
1470 && e->can_throw_external)
1471 can_throw = true;
1473 else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
1474 can_throw = true;
1476 for (ie = w->indirect_calls; ie && !can_throw; ie = ie->next_callee)
1477 if (ie->can_throw_external)
1478 can_throw = true;
1479 w_info = (struct ipa_dfs_info *) w->aux;
1480 w = w_info->next_cycle;
1483 /* Copy back the region's pure_const_state which is shared by
1484 all nodes in the region. */
1485 w = node;
1486 while (w)
1488 funct_state w_l = get_function_state (w);
1489 if (!can_throw && !TREE_NOTHROW (w->decl))
1491 /* Inline clones share declaration with their offline copies;
1492 do not modify their declarations since the offline copy may
1493 be different. */
1494 if (!w->global.inlined_to)
1496 w->set_nothrow_flag (true);
1497 if (dump_file)
1498 fprintf (dump_file, "Function found to be nothrow: %s\n",
1499 w->name ());
1502 else if (can_throw && !TREE_NOTHROW (w->decl))
1503 w_l->can_throw = true;
1504 w_info = (struct ipa_dfs_info *) w->aux;
1505 w = w_info->next_cycle;
1509 ipa_free_postorder_info ();
1510 free (order);
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */
1517 unsigned int
1518 pass_ipa_pure_const::
1519 execute (function *)
1521 struct cgraph_node *node;
1523 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
1524 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
1525 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
  /* Nothrow makes more functions not lead to a return and improves
     the later analysis.  */
1529 propagate_nothrow ();
1530 propagate_pure_const ();
1532 /* Cleanup. */
1533 FOR_EACH_FUNCTION (node)
1534 if (has_function_state (node))
1535 free (get_function_state (node));
1536 funct_state_vec.release ();
1537 return 0;
static bool
gate_pure_const (void)
{
  return (flag_ipa_pure_const
	  /* Don't bother doing anything if the program has errors.  */
	  && !seen_error ());
}
pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p(false),
  function_insertion_hook_holder(NULL),
  node_duplication_hook_holder(NULL),
  node_removal_hook_holder(NULL)
{
}
ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}
1572 /* Return true if function should be skipped for local pure const analysis. */
1574 static bool
1575 skip_function_for_local_pure_const (struct cgraph_node *node)
  /* Because we do not schedule pass_fixup_cfg over the whole program after
     early optimizations, we must not promote functions that are called by
     already processed functions.  */
1580 if (function_called_by_processed_nodes_p ())
1582 if (dump_file)
1583 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1584 return true;
1586 if (node->get_availability () <= AVAIL_INTERPOSABLE)
1588 if (dump_file)
1589 fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
1590 return true;
1592 return false;
/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */
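/* An illustrative example (hypothetical user code) of what the local
   variant catches early:

     static int counter;
     int bump (void) { return ++counter; }   -- neither (writes a static)
     int peek (void) { return counter; }     -- discovered pure right away

   Because this runs inside the early optimization queue, later early passes
   in the same translation unit can already take advantage of the discovery
   (for example by combining repeated calls to peek); the dump of the
   "local-pure-const" pass records the decision.  */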
1599 namespace {
const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
1628 unsigned int
1629 pass_local_pure_const::execute (function *fun)
1631 bool changed = false;
1632 funct_state l;
1633 bool skip;
1634 struct cgraph_node *node;
1636 node = cgraph_node::get (current_function_decl);
1637 skip = skip_function_for_local_pure_const (node);
1638 if (!warn_suggest_attribute_const
1639 && !warn_suggest_attribute_pure
1640 && skip)
1641 return 0;
1643 l = analyze_function (node, false);
1645 /* Do NORETURN discovery. */
1646 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
1647 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
1649 warn_function_noreturn (fun->decl);
1650 if (dump_file)
1651 fprintf (dump_file, "Function found to be noreturn: %s\n",
1652 current_function_name ());
1654 /* Update declaration and reduce profile to executed once. */
1655 TREE_THIS_VOLATILE (current_function_decl) = 1;
1656 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
1657 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
1659 changed = true;
1662 switch (l->pure_const_state)
1664 case IPA_CONST:
1665 if (!TREE_READONLY (current_function_decl))
1667 warn_function_const (current_function_decl, !l->looping);
1668 if (!skip)
1670 node->set_const_flag (true, l->looping);
1671 changed = true;
1673 if (dump_file)
1674 fprintf (dump_file, "Function found to be %sconst: %s\n",
1675 l->looping ? "looping " : "",
1676 current_function_name ());
1678 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1679 && !l->looping)
1681 if (!skip)
1683 node->set_const_flag (true, false);
1684 changed = true;
1686 if (dump_file)
1687 fprintf (dump_file, "Function found to be non-looping: %s\n",
1688 current_function_name ());
1690 break;
1692 case IPA_PURE:
1693 if (!DECL_PURE_P (current_function_decl))
1695 if (!skip)
1697 node->set_pure_flag (true, l->looping);
1698 changed = true;
1700 warn_function_pure (current_function_decl, !l->looping);
1701 if (dump_file)
1702 fprintf (dump_file, "Function found to be %spure: %s\n",
1703 l->looping ? "looping " : "",
1704 current_function_name ());
1706 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1707 && !l->looping)
1709 if (!skip)
1711 node->set_pure_flag (true, false);
1712 changed = true;
1714 if (dump_file)
1715 fprintf (dump_file, "Function found to be non-looping: %s\n",
1716 current_function_name ());
1718 break;
1720 default:
1721 break;
1723 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1725 node->set_nothrow_flag (true);
1726 changed = true;
1727 if (dump_file)
1728 fprintf (dump_file, "Function found to be nothrow: %s\n",
1729 current_function_name ());
1731 free (l);
1732 if (changed)
1733 return execute_fixup_cfg ();
1734 else
1735 return 0;
1738 } // anon namespace
gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}
1746 /* Emit noreturn warnings. */
1748 namespace {
const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
  {
    if (!TREE_THIS_VOLATILE (current_function_decl)
	&& EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
      warn_function_noreturn (current_function_decl);
    return 0;
  }

}; // class pass_warn_function_noreturn
1782 } // anon namespace
gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}