gcc/ipa-pure-const.c
1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file marks functions as being either const (TREE_READONLY) or
23 pure (DECL_PURE_P). It can also set variants of these that
24 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
26 This must be run after inlining decisions have been made, since
27 otherwise the local sets will not contain information that is
28 consistent with the post-inlined state. The global sets are not prone
29 to this problem since they are by definition transitive. */
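/* Illustration only (not part of the original sources): a function that
   touches no memory apart from its arguments, e.g.

     static int sq (int x) { return x * x; }

   can be marked const (TREE_READONLY), while one that merely reads a
   global, e.g.

     extern int counter;
     static int get_counter (void) { return counter; }

   writes nothing and is therefore at best pure (DECL_PURE_P). The names
   "sq", "counter" and "get_counter" are made up for this example. */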
31 /* The code in this module is called by the ipa pass manager. It
32 should be one of the later passes since its information is used by
33 the rest of the compilation. */
35 #include "config.h"
36 #include "system.h"
37 #include "coretypes.h"
38 #include "tm.h"
39 #include "tree.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "langhooks.h"
44 #include "pointer-set.h"
45 #include "ggc.h"
46 #include "ipa-utils.h"
47 #include "gimple.h"
48 #include "cgraph.h"
49 #include "output.h"
50 #include "flags.h"
51 #include "timevar.h"
52 #include "diagnostic.h"
53 #include "gimple-pretty-print.h"
54 #include "langhooks.h"
55 #include "target.h"
56 #include "lto-streamer.h"
57 #include "data-streamer.h"
58 #include "tree-streamer.h"
59 #include "cfgloop.h"
60 #include "tree-scalar-evolution.h"
61 #include "intl.h"
62 #include "opts.h"
64 static struct pointer_set_t *visited_nodes;
66 /* Lattice values for const and pure functions. Everything starts out
67 being const, then may drop to pure and then neither depending on
68 what is found. */
69 enum pure_const_state_e
71 IPA_CONST,
72 IPA_PURE,
73 IPA_NEITHER
76 const char *pure_const_names[3] = {"const", "pure", "neither"};
78 /* Holder for the const_state. There is one of these per function
79 decl. */
80 struct funct_state_d
82 /* See above. */
83 enum pure_const_state_e pure_const_state;
84 /* What the user set here; we can always be sure about this. */
85 enum pure_const_state_e state_previously_known;
86 bool looping_previously_known;
88 /* True if the function could possibly loop forever. There are a
89 lot of ways that this could be determined. We are pretty
90 conservative here. While it is possible to CSE pure and const
91 calls, it is not legal to have DCE get rid of the call if there
92 is a possibility that the call could loop forever, since that is
93 a behavioral change. */
94 bool looping;
96 bool can_throw;
99 /* State used when we know nothing about function. */
100 static struct funct_state_d varying_state
101 = { IPA_NEITHER, IPA_NEITHER, true, true, true };
104 typedef struct funct_state_d * funct_state;
106 /* The storage of the funct_state is abstracted because there is the
107 possibility that it may be desirable to move this to the cgraph
108 local info. */
110 /* Array, indexed by cgraph node uid, of function states. */
112 DEF_VEC_P (funct_state);
113 DEF_VEC_ALLOC_P (funct_state, heap);
114 static VEC (funct_state, heap) *funct_state_vec;
116 /* Holders of ipa cgraph hooks: */
117 static struct cgraph_node_hook_list *function_insertion_hook_holder;
118 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
119 static struct cgraph_node_hook_list *node_removal_hook_holder;
121 /* Try to guess if the function body will always be visible to the
122 compiler when compiling a call to it, and whether the compiler will
123 be able to propagate the information by itself. */
125 static bool
126 function_always_visible_to_compiler_p (tree decl)
128 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
131 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
132 is true if the function is known to be finite. The diagnostic is
133 controlled by OPTION. WARNED_ABOUT is a pointer_set unique to
134 OPTION; this function may initialize it, and it is always returned
135 by the function. */
137 static struct pointer_set_t *
138 suggest_attribute (int option, tree decl, bool known_finite,
139 struct pointer_set_t *warned_about,
140 const char * attrib_name)
142 if (!option_enabled (option, &global_options))
143 return warned_about;
144 if (TREE_THIS_VOLATILE (decl)
145 || (known_finite && function_always_visible_to_compiler_p (decl)))
146 return warned_about;
148 if (!warned_about)
149 warned_about = pointer_set_create ();
150 if (pointer_set_contains (warned_about, decl))
151 return warned_about;
152 pointer_set_insert (warned_about, decl);
153 warning_at (DECL_SOURCE_LOCATION (decl),
154 option,
155 known_finite
156 ? _("function might be candidate for attribute %<%s%>")
157 : _("function might be candidate for attribute %<%s%>"
158 " if it is known to return normally"), attrib_name);
159 return warned_about;
162 /* Emit suggestion about __attribute__((pure)) for DECL. KNOWN_FINITE
163 is true if the function is known to be finite. */
165 static void
166 warn_function_pure (tree decl, bool known_finite)
168 static struct pointer_set_t *warned_about;
170 warned_about
171 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
172 known_finite, warned_about, "pure");
175 /* Emit suggestion about __attribute__((const)) for DECL. KNOWN_FINITE
176 is true if the function is known to be finite. */
178 static void
179 warn_function_const (tree decl, bool known_finite)
181 static struct pointer_set_t *warned_about;
182 warned_about
183 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
184 known_finite, warned_about, "const");
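/* Illustrative note (not from the original sources): the two helpers above
   back the -Wsuggest-attribute=const and -Wsuggest-attribute=pure warnings.
   Acting on such a warning means annotating the declaration, for instance

     int count_matches (const char *s) __attribute__ ((pure));

   so that callers which never see the body can still CSE or hoist the call.
   The function name is hypothetical. */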
187 void
188 warn_function_noreturn (tree decl)
190 static struct pointer_set_t *warned_about;
191 if (!lang_hooks.missing_noreturn_ok_p (decl))
192 warned_about
193 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
194 true, warned_about, "noreturn");
196 /* Free the function state vector. */
198 static void
199 finish_state (void)
201 free (funct_state_vec);
205 /* Return true if we have a function state for NODE. */
207 static inline bool
208 has_function_state (struct cgraph_node *node)
210 if (!funct_state_vec
211 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
212 return false;
213 return VEC_index (funct_state, funct_state_vec, node->uid) != NULL;
216 /* Return the function state from NODE. */
218 static inline funct_state
219 get_function_state (struct cgraph_node *node)
221 if (!funct_state_vec
222 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid
223 || !VEC_index (funct_state, funct_state_vec, node->uid))
224 /* We might want to put correct previously_known state into varying. */
225 return &varying_state;
226 return VEC_index (funct_state, funct_state_vec, node->uid);
229 /* Set the function state S for NODE. */
231 static inline void
232 set_function_state (struct cgraph_node *node, funct_state s)
234 if (!funct_state_vec
235 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
236 VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
237 VEC_replace (funct_state, funct_state_vec, node->uid, s);
240 /* Check to see if the use (or definition when CHECKING_WRITE is true)
241 variable T is legal in a function that is either pure or const. */
243 static inline void
244 check_decl (funct_state local,
245 tree t, bool checking_write, bool ipa)
247 /* We do not want to do anything with volatiles except mark any
248 function that uses one as not const or pure. */
249 if (TREE_THIS_VOLATILE (t))
251 local->pure_const_state = IPA_NEITHER;
252 if (dump_file)
253 fprintf (dump_file, " Volatile operand is not const/pure");
254 return;
257 /* Do not care about a local automatic that is not static. */
258 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
259 return;
261 /* If the variable has the "used" attribute, treat it as if it had
262 been touched by the devil. */
263 if (DECL_PRESERVE_P (t))
265 local->pure_const_state = IPA_NEITHER;
266 if (dump_file)
267 fprintf (dump_file, " Used static/global variable is not const/pure\n");
268 return;
271 /* In IPA mode we are not interested in checking actual loads and stores;
272 they will be processed at propagation time using ipa_ref. */
273 if (ipa)
274 return;
276 /* Since we have dealt with the locals and params cases above, if we
277 are CHECKING_WRITE, this cannot be a pure or constant
278 function. */
279 if (checking_write)
281 local->pure_const_state = IPA_NEITHER;
282 if (dump_file)
283 fprintf (dump_file, " static/global memory write is not const/pure\n");
284 return;
287 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
289 /* Readonly reads are safe. */
290 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
291 return; /* Read of a constant, do not change the function state. */
292 else
294 if (dump_file)
295 fprintf (dump_file, " global memory read is not const\n");
296 /* Just a regular read. */
297 if (local->pure_const_state == IPA_CONST)
298 local->pure_const_state = IPA_PURE;
301 else
303 /* Compilation level statics can be read if they are readonly
304 variables. */
305 if (TREE_READONLY (t))
306 return;
308 if (dump_file)
309 fprintf (dump_file, " static memory read is not const\n");
310 /* Just a regular read. */
311 if (local->pure_const_state == IPA_CONST)
312 local->pure_const_state = IPA_PURE;
317 /* Check to see if the use (or definition when CHECKING_WRITE is true)
318 variable T is legal in a function that is either pure or const. */
320 static inline void
321 check_op (funct_state local, tree t, bool checking_write)
323 t = get_base_address (t);
324 if (t && TREE_THIS_VOLATILE (t))
326 local->pure_const_state = IPA_NEITHER;
327 if (dump_file)
328 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
329 return;
331 else if (t
332 && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
333 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
334 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
336 if (dump_file)
337 fprintf (dump_file, " Indirect ref to local memory is OK\n");
338 return;
340 else if (checking_write)
342 local->pure_const_state = IPA_NEITHER;
343 if (dump_file)
344 fprintf (dump_file, " Indirect ref write is not const/pure\n");
345 return;
347 else
349 if (dump_file)
350 fprintf (dump_file, " Indirect ref read is not const\n");
351 if (local->pure_const_state == IPA_CONST)
352 local->pure_const_state = IPA_PURE;
356 /* Compute the state based on ECF FLAGS and store it to STATE and LOOPING. */
358 static void
359 state_from_flags (enum pure_const_state_e *state, bool *looping,
360 int flags, bool cannot_lead_to_return)
362 *looping = false;
363 if (flags & ECF_LOOPING_CONST_OR_PURE)
365 *looping = true;
366 if (dump_file && (dump_flags & TDF_DETAILS))
367 fprintf (dump_file, " looping");
369 if (flags & ECF_CONST)
371 *state = IPA_CONST;
372 if (dump_file && (dump_flags & TDF_DETAILS))
373 fprintf (dump_file, " const\n");
375 else if (flags & ECF_PURE)
377 *state = IPA_PURE;
378 if (dump_file && (dump_flags & TDF_DETAILS))
379 fprintf (dump_file, " pure\n");
381 else if (cannot_lead_to_return)
383 *state = IPA_PURE;
384 *looping = true;
385 if (dump_file && (dump_flags & TDF_DETAILS))
386 fprintf (dump_file, " ignoring side effects->pure looping\n");
388 else
390 if (dump_file && (dump_flags & TDF_DETAILS))
391 fprintf (dump_file, " neither\n");
392 *state = IPA_NEITHER;
393 *looping = true;
397 /* Merge STATE2 and LOOPING2 into STATE and LOOPING, keeping the
398 better of the two variants.
399 Be sure to merge looping correctly: IPA_NEITHER functions
400 have looping 0 even if they don't have to return. */
402 static inline void
403 better_state (enum pure_const_state_e *state, bool *looping,
404 enum pure_const_state_e state2, bool looping2)
406 if (state2 < *state)
408 if (*state == IPA_NEITHER)
409 *looping = looping2;
410 else
411 *looping = MIN (*looping, looping2);
413 else if (state2 != IPA_NEITHER)
414 *looping = MIN (*looping, looping2);
417 /* Merge STATE2 and LOOPING2 into STATE and LOOPING, keeping the
418 worse of the two variants. */
420 static inline void
421 worse_state (enum pure_const_state_e *state, bool *looping,
422 enum pure_const_state_e state2, bool looping2)
424 *state = MAX (*state, state2);
425 *looping = MAX (*looping, looping2);
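/* A small worked example (illustration only, not used anywhere in this
   file): because IPA_CONST < IPA_PURE < IPA_NEITHER, worse_state is simply
   a MAX over the lattice ordering, e.g.

     enum pure_const_state_e s = IPA_CONST;
     bool l = false;
     worse_state (&s, &l, IPA_PURE, true);

   leaves s == IPA_PURE and l == true, whereas better_state keeps the
   better (smaller) of the two states. */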
428 /* Recognize special cases of builtins that are by themselves not pure or const
429 but function using them is. */
430 static bool
431 special_builtin_state (enum pure_const_state_e *state, bool *looping,
432 tree callee)
434 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
435 switch (DECL_FUNCTION_CODE (callee))
437 case BUILT_IN_RETURN:
438 case BUILT_IN_UNREACHABLE:
439 case BUILT_IN_ALLOCA:
440 case BUILT_IN_STACK_SAVE:
441 case BUILT_IN_STACK_RESTORE:
442 case BUILT_IN_EH_POINTER:
443 case BUILT_IN_EH_FILTER:
444 case BUILT_IN_UNWIND_RESUME:
445 case BUILT_IN_CXA_END_CLEANUP:
446 case BUILT_IN_EH_COPY_VALUES:
447 case BUILT_IN_FRAME_ADDRESS:
448 case BUILT_IN_APPLY:
449 case BUILT_IN_APPLY_ARGS:
450 *looping = false;
451 *state = IPA_CONST;
452 return true;
453 case BUILT_IN_PREFETCH:
454 *looping = true;
455 *state = IPA_CONST;
456 return true;
458 return false;
461 /* Check the parameters of a function call in statement CALL to see if
462 there are any references in the parameters that are not allowed for
463 pure or const functions. Also check to see if this is either an
464 indirect call, a call outside the compilation unit, or a call that
465 has special attributes that may also affect its purity. CALL is
466 the GIMPLE statement for the entire call. */
468 static void
469 check_call (funct_state local, gimple call, bool ipa)
471 int flags = gimple_call_flags (call);
472 tree callee_t = gimple_call_fndecl (call);
473 bool possibly_throws = stmt_could_throw_p (call);
474 bool possibly_throws_externally = (possibly_throws
475 && stmt_can_throw_external (call));
477 if (possibly_throws)
479 unsigned int i;
480 for (i = 0; i < gimple_num_ops (call); i++)
481 if (gimple_op (call, i)
482 && tree_could_throw_p (gimple_op (call, i)))
484 if (possibly_throws && cfun->can_throw_non_call_exceptions)
486 if (dump_file)
487 fprintf (dump_file, " operand can throw; looping\n");
488 local->looping = true;
490 if (possibly_throws_externally)
492 if (dump_file)
493 fprintf (dump_file, " operand can throw externally\n");
494 local->can_throw = true;
499 /* The const and pure flags are set by a variety of places in the
500 compiler (including here). If someone has already set the flags
501 for the callee, (such as for some of the builtins) we will use
502 them, otherwise we will compute our own information.
504 Const and pure functions have less clobber effects than other
505 functions so we process these first. Otherwise if it is a call
506 outside the compilation unit or an indirect call we punt. This
507 leaves local calls which will be processed by following the call
508 graph. */
509 if (callee_t)
511 enum pure_const_state_e call_state;
512 bool call_looping;
514 if (special_builtin_state (&call_state, &call_looping, callee_t))
516 worse_state (&local->pure_const_state, &local->looping,
517 call_state, call_looping);
518 return;
520 /* When bad things happen to bad functions, they cannot be const
521 or pure. */
522 if (setjmp_call_p (callee_t))
524 if (dump_file)
525 fprintf (dump_file, " setjmp is not const/pure\n");
526 local->looping = true;
527 local->pure_const_state = IPA_NEITHER;
530 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
531 switch (DECL_FUNCTION_CODE (callee_t))
533 case BUILT_IN_LONGJMP:
534 case BUILT_IN_NONLOCAL_GOTO:
535 if (dump_file)
536 fprintf (dump_file, " longjmp and nonlocal goto are not const/pure\n");
537 local->pure_const_state = IPA_NEITHER;
538 local->looping = true;
539 break;
540 default:
541 break;
545 /* When not in IPA mode, we can still handle self recursion. */
546 if (!ipa && callee_t == current_function_decl)
548 if (dump_file)
549 fprintf (dump_file, " Recursive call can loop.\n");
550 local->looping = true;
552 /* Either the callee is unknown or we are doing local analysis.
553 Look to see if there are any bits available for the callee (such as by
554 declaration or because it is a builtin) and process solely on the basis of
555 those bits. */
556 else if (!ipa)
558 enum pure_const_state_e call_state;
559 bool call_looping;
560 if (possibly_throws && cfun->can_throw_non_call_exceptions)
562 if (dump_file)
563 fprintf (dump_file, " can throw; looping\n");
564 local->looping = true;
566 if (possibly_throws_externally)
568 if (dump_file)
570 fprintf (dump_file, " can throw externally to lp %i\n",
571 lookup_stmt_eh_lp (call));
572 if (callee_t)
573 fprintf (dump_file, " callee:%s\n",
574 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
576 local->can_throw = true;
578 if (dump_file && (dump_flags & TDF_DETAILS))
579 fprintf (dump_file, " checking flags for call:");
580 state_from_flags (&call_state, &call_looping, flags,
581 ((flags & (ECF_NORETURN | ECF_NOTHROW))
582 == (ECF_NORETURN | ECF_NOTHROW))
583 || (!flag_exceptions && (flags & ECF_NORETURN)));
584 worse_state (&local->pure_const_state, &local->looping,
585 call_state, call_looping);
587 /* Direct function calls are handled by IPA propagation. */
590 /* Wrapper around check_decl for loads in local mode. */
592 static bool
593 check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
595 if (DECL_P (op))
596 check_decl ((funct_state)data, op, false, false);
597 else
598 check_op ((funct_state)data, op, false);
599 return false;
602 /* Wrapper around check_decl for stores in local mode. */
604 static bool
605 check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
607 if (DECL_P (op))
608 check_decl ((funct_state)data, op, true, false);
609 else
610 check_op ((funct_state)data, op, true);
611 return false;
614 /* Wrapper around check_decl for loads in ipa mode. */
616 static bool
617 check_ipa_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
619 if (DECL_P (op))
620 check_decl ((funct_state)data, op, false, true);
621 else
622 check_op ((funct_state)data, op, false);
623 return false;
626 /* Wrapper around check_decl for stores in ipa mode. */
628 static bool
629 check_ipa_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
631 if (DECL_P (op))
632 check_decl ((funct_state)data, op, true, true);
633 else
634 check_op ((funct_state)data, op, true);
635 return false;
638 /* Look at the statement pointed to by the iterator GSIP and figure out
639 what interesting side effects it has. */
640 static void
641 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
643 gimple stmt = gsi_stmt (*gsip);
645 if (is_gimple_debug (stmt))
646 return;
648 if (dump_file)
650 fprintf (dump_file, " scanning: ");
651 print_gimple_stmt (dump_file, stmt, 0, 0);
654 if (gimple_has_volatile_ops (stmt))
656 local->pure_const_state = IPA_NEITHER;
657 if (dump_file)
658 fprintf (dump_file, " Volatile stmt is not const/pure\n");
661 /* Look for loads and stores. */
662 walk_stmt_load_store_ops (stmt, local,
663 ipa ? check_ipa_load : check_load,
664 ipa ? check_ipa_store : check_store);
666 if (gimple_code (stmt) != GIMPLE_CALL
667 && stmt_could_throw_p (stmt))
669 if (cfun->can_throw_non_call_exceptions)
671 if (dump_file)
672 fprintf (dump_file, " can throw; looping");
673 local->looping = true;
675 if (stmt_can_throw_external (stmt))
677 if (dump_file)
678 fprintf (dump_file, " can throw externally");
679 local->can_throw = true;
682 switch (gimple_code (stmt))
684 case GIMPLE_CALL:
685 check_call (local, stmt, ipa);
686 break;
687 case GIMPLE_LABEL:
688 if (DECL_NONLOCAL (gimple_label_label (stmt)))
689 /* Target of long jump. */
691 if (dump_file)
692 fprintf (dump_file, " nonlocal label is not const/pure");
693 local->pure_const_state = IPA_NEITHER;
695 break;
696 case GIMPLE_ASM:
697 if (gimple_asm_clobbers_memory_p (stmt))
699 if (dump_file)
700 fprintf (dump_file, " memory asm clobber is not const/pure");
701 /* Abandon all hope, ye who enter here. */
702 local->pure_const_state = IPA_NEITHER;
704 if (gimple_asm_volatile_p (stmt))
706 if (dump_file)
707 fprintf (dump_file, " volatile is not const/pure");
708 /* Abandon all hope, ye who enter here. */
709 local->pure_const_state = IPA_NEITHER;
710 local->looping = true;
712 return;
713 default:
714 break;
719 /* This is the main routine for finding the reference patterns for
720 global variables within a function FN. */
722 static funct_state
723 analyze_function (struct cgraph_node *fn, bool ipa)
725 tree decl = fn->decl;
726 tree old_decl = current_function_decl;
727 funct_state l;
728 basic_block this_block;
730 l = XCNEW (struct funct_state_d);
731 l->pure_const_state = IPA_CONST;
732 l->state_previously_known = IPA_NEITHER;
733 l->looping_previously_known = true;
734 l->looping = false;
735 l->can_throw = false;
736 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
737 flags_from_decl_or_type (fn->decl),
738 cgraph_node_cannot_return (fn));
740 if (fn->thunk.thunk_p || fn->alias)
742 /* Thunk gets propagated through, so nothing interesting happens. */
743 gcc_assert (ipa);
744 return l;
747 if (dump_file)
749 fprintf (dump_file, "\n\n local analysis of %s\n ",
750 cgraph_node_name (fn));
753 push_cfun (DECL_STRUCT_FUNCTION (decl));
754 current_function_decl = decl;
756 FOR_EACH_BB (this_block)
758 gimple_stmt_iterator gsi;
759 struct walk_stmt_info wi;
761 memset (&wi, 0, sizeof(wi));
762 for (gsi = gsi_start_bb (this_block);
763 !gsi_end_p (gsi);
764 gsi_next (&gsi))
766 check_stmt (&gsi, l, ipa);
767 if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
768 goto end;
772 end:
773 if (l->pure_const_state != IPA_NEITHER)
775 /* Const functions cannot have back edges (an
776 indication of a possible infinite loop side
777 effect). */
778 if (mark_dfs_back_edges ())
780 /* Preheaders are needed for SCEV to work.
781 Simple latches and recorded exits improve the chances that a loop will
782 be proved to be finite in testcases such as loop-15.c and loop-24.c. */
783 loop_optimizer_init (LOOPS_NORMAL
784 | LOOPS_HAVE_RECORDED_EXITS);
785 if (dump_file && (dump_flags & TDF_DETAILS))
786 flow_loops_dump (dump_file, NULL, 0);
787 if (mark_irreducible_loops ())
789 if (dump_file)
790 fprintf (dump_file, " has irreducible loops\n");
791 l->looping = true;
793 else
795 loop_iterator li;
796 struct loop *loop;
797 scev_initialize ();
798 FOR_EACH_LOOP (li, loop, 0)
799 if (!finite_loop_p (loop))
801 if (dump_file)
802 fprintf (dump_file, " cannot prove finiteness of loop %i\n", loop->num);
803 l->looping = true;
804 break;
806 scev_finalize ();
808 loop_optimizer_finalize ();
812 if (dump_file && (dump_flags & TDF_DETAILS))
813 fprintf (dump_file, " checking previously known:");
815 better_state (&l->pure_const_state, &l->looping,
816 l->state_previously_known,
817 l->looping_previously_known);
818 if (TREE_NOTHROW (decl))
819 l->can_throw = false;
821 pop_cfun ();
822 current_function_decl = old_decl;
823 if (dump_file)
825 if (l->looping)
826 fprintf (dump_file, "Function is locally looping.\n");
827 if (l->can_throw)
828 fprintf (dump_file, "Function is locally throwing.\n");
829 if (l->pure_const_state == IPA_CONST)
830 fprintf (dump_file, "Function is locally const.\n");
831 if (l->pure_const_state == IPA_PURE)
832 fprintf (dump_file, "Function is locally pure.\n");
834 return l;
837 /* Called when a new function is inserted into the callgraph late. */
838 static void
839 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
841 if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
842 return;
843 /* There are some shared nodes, in particular the initializers on
844 static declarations. We do not need to scan them more than once
845 since all we would be interested in are the addressof
846 operations. */
847 visited_nodes = pointer_set_create ();
848 if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
849 set_function_state (node, analyze_function (node, true));
850 pointer_set_destroy (visited_nodes);
851 visited_nodes = NULL;
854 /* Called when a new clone is inserted into the callgraph late. */
856 static void
857 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
858 void *data ATTRIBUTE_UNUSED)
860 if (has_function_state (src))
862 funct_state l = XNEW (struct funct_state_d);
863 gcc_assert (!has_function_state (dst));
864 memcpy (l, get_function_state (src), sizeof (*l));
865 set_function_state (dst, l);
869 /* Called when a cgraph node is removed. */
871 static void
872 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
874 if (has_function_state (node))
876 funct_state l = get_function_state (node);
877 if (l != &varying_state)
878 free (l);
879 set_function_state (node, NULL);
884 static void
885 register_hooks (void)
887 static bool init_p = false;
889 if (init_p)
890 return;
892 init_p = true;
894 node_removal_hook_holder =
895 cgraph_add_node_removal_hook (&remove_node_data, NULL);
896 node_duplication_hook_holder =
897 cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
898 function_insertion_hook_holder =
899 cgraph_add_function_insertion_hook (&add_new_function, NULL);
903 /* Analyze each function in the cgraph to see if it is locally PURE or
904 CONST. */
906 static void
907 generate_summary (void)
909 struct cgraph_node *node;
911 register_hooks ();
913 /* There are some shared nodes, in particular the initializers on
914 static declarations. We do not need to scan them more than once
915 since all we would be interested in are the addressof
916 operations. */
917 visited_nodes = pointer_set_create ();
919 /* Process all of the functions.
921 We process AVAIL_OVERWRITABLE functions. We cannot use the results
922 by default, but the info can be used at LTO with -fwhole-program or
923 when the function got cloned and the clone is AVAILABLE. */
925 for (node = cgraph_nodes; node; node = node->next)
926 if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
927 set_function_state (node, analyze_function (node, true));
929 pointer_set_destroy (visited_nodes);
930 visited_nodes = NULL;
934 /* Serialize the ipa info for lto. */
936 static void
937 pure_const_write_summary (cgraph_node_set set,
938 varpool_node_set vset ATTRIBUTE_UNUSED)
940 struct cgraph_node *node;
941 struct lto_simple_output_block *ob
942 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
943 unsigned int count = 0;
944 cgraph_node_set_iterator csi;
946 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
948 node = csi_node (csi);
949 if (node->analyzed && has_function_state (node))
950 count++;
953 streamer_write_uhwi_stream (ob->main_stream, count);
955 /* Process all of the functions. */
956 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
958 node = csi_node (csi);
959 if (node->analyzed && has_function_state (node))
961 struct bitpack_d bp;
962 funct_state fs;
963 int node_ref;
964 lto_cgraph_encoder_t encoder;
966 fs = get_function_state (node);
968 encoder = ob->decl_state->cgraph_node_encoder;
969 node_ref = lto_cgraph_encoder_encode (encoder, node);
970 streamer_write_uhwi_stream (ob->main_stream, node_ref);
972 /* Note that flags will need to be read in the opposite
973 order as we are pushing the bitflags into FLAGS. */
974 bp = bitpack_create (ob->main_stream);
975 bp_pack_value (&bp, fs->pure_const_state, 2);
976 bp_pack_value (&bp, fs->state_previously_known, 2);
977 bp_pack_value (&bp, fs->looping_previously_known, 1);
978 bp_pack_value (&bp, fs->looping, 1);
979 bp_pack_value (&bp, fs->can_throw, 1);
980 streamer_write_bitpack (&bp);
984 lto_destroy_simple_output_block (ob);
988 /* Deserialize the ipa info for lto. */
990 static void
991 pure_const_read_summary (void)
993 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
994 struct lto_file_decl_data *file_data;
995 unsigned int j = 0;
997 register_hooks ();
998 while ((file_data = file_data_vec[j++]))
1000 const char *data;
1001 size_t len;
1002 struct lto_input_block *ib
1003 = lto_create_simple_input_block (file_data,
1004 LTO_section_ipa_pure_const,
1005 &data, &len);
1006 if (ib)
1008 unsigned int i;
1009 unsigned int count = streamer_read_uhwi (ib);
1011 for (i = 0; i < count; i++)
1013 unsigned int index;
1014 struct cgraph_node *node;
1015 struct bitpack_d bp;
1016 funct_state fs;
1017 lto_cgraph_encoder_t encoder;
1019 fs = XCNEW (struct funct_state_d);
1020 index = streamer_read_uhwi (ib);
1021 encoder = file_data->cgraph_node_encoder;
1022 node = lto_cgraph_encoder_deref (encoder, index);
1023 set_function_state (node, fs);
1025 /* Note that the flags must be read in the opposite
1026 order in which they were written (the bitflags were
1027 pushed into FLAGS). */
1028 bp = streamer_read_bitpack (ib);
1029 fs->pure_const_state
1030 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1031 fs->state_previously_known
1032 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1033 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1034 fs->looping = bp_unpack_value (&bp, 1);
1035 fs->can_throw = bp_unpack_value (&bp, 1);
1036 if (dump_file)
1038 int flags = flags_from_decl_or_type (node->decl);
1039 fprintf (dump_file, "Read info for %s/%i ",
1040 cgraph_node_name (node),
1041 node->uid);
1042 if (flags & ECF_CONST)
1043 fprintf (dump_file, " const");
1044 if (flags & ECF_PURE)
1045 fprintf (dump_file, " pure");
1046 if (flags & ECF_NOTHROW)
1047 fprintf (dump_file, " nothrow");
1048 fprintf (dump_file, "\n pure const state: %s\n",
1049 pure_const_names[fs->pure_const_state]);
1050 fprintf (dump_file, " previously known state: %s\n",
1051 pure_const_names[fs->state_previously_known]);
1052 if (fs->looping)
1053 fprintf (dump_file," function is locally looping\n");
1054 if (fs->looping_previously_known)
1055 fprintf (dump_file," function is previously known looping\n");
1056 if (fs->can_throw)
1057 fprintf (dump_file," function is locally throwing\n");
1061 lto_destroy_simple_input_block (file_data,
1062 LTO_section_ipa_pure_const,
1063 ib, data, len);
1069 static bool
1070 ignore_edge (struct cgraph_edge *e)
1072 return (!e->can_throw_external);
1075 /* Return true if NODE is a self-recursive function.
1076 ??? Self-recursive and indirectly recursive functions should
1077 be handled the same, so this function seems unnecessary. */
1079 static bool
1080 self_recursive_p (struct cgraph_node *node)
1082 struct cgraph_edge *e;
1083 for (e = node->callees; e; e = e->next_callee)
1084 if (cgraph_function_node (e->callee, NULL) == node)
1085 return true;
1086 return false;
1089 /* Produce transitive closure over the callgraph and compute pure/const
1090 attributes. */
1092 static void
1093 propagate_pure_const (void)
1095 struct cgraph_node *node;
1096 struct cgraph_node *w;
1097 struct cgraph_node **order =
1098 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1099 int order_pos;
1100 int i;
1101 struct ipa_dfs_info * w_info;
1103 order_pos = ipa_reduced_postorder (order, true, false, NULL);
1104 if (dump_file)
1106 dump_cgraph (dump_file);
1107 ipa_print_order(dump_file, "reduced", order, order_pos);
1110 /* Propagate the local information through the call graph to produce
1111 the global information. All the nodes within a cycle will have
1112 the same info, so we collapse cycles first. Then we can do the
1113 propagation in one pass from the leaves to the roots. */
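/* Example (illustration only, with made-up functions): for a mutually
   recursive pair such as

     static int even_p (int n);
     static int odd_p (int n) { return n ? even_p (n - 1) : 0; }
     static int even_p (int n) { return n ? odd_p (n - 1) : 1; }

   both functions end up in the same reduced-postorder cycle; the loop below
   merges their states and, because the cycle contains more than one node,
   conservatively treats it as possibly looping. */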
1114 for (i = 0; i < order_pos; i++ )
1116 enum pure_const_state_e pure_const_state = IPA_CONST;
1117 bool looping = false;
1118 int count = 0;
1119 node = order[i];
1121 if (node->alias)
1122 continue;
1124 if (dump_file && (dump_flags & TDF_DETAILS))
1125 fprintf (dump_file, "Starting cycle\n");
1127 /* Find the worst state for any node in the cycle. */
1128 w = node;
1129 while (w && pure_const_state != IPA_NEITHER)
1131 struct cgraph_edge *e;
1132 struct cgraph_edge *ie;
1133 int i;
1134 struct ipa_ref *ref;
1136 funct_state w_l = get_function_state (w);
1137 if (dump_file && (dump_flags & TDF_DETAILS))
1138 fprintf (dump_file, " Visiting %s/%i state:%s looping %i\n",
1139 cgraph_node_name (w),
1140 w->uid,
1141 pure_const_names[w_l->pure_const_state],
1142 w_l->looping);
1144 /* First merge in function body properties. */
1145 worse_state (&pure_const_state, &looping,
1146 w_l->pure_const_state, w_l->looping);
1147 if (pure_const_state == IPA_NEITHER)
1148 break;
1150 /* For overwritable nodes we can not assume anything. */
1151 if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1153 worse_state (&pure_const_state, &looping,
1154 w_l->state_previously_known,
1155 w_l->looping_previously_known);
1156 if (dump_file && (dump_flags & TDF_DETAILS))
1158 fprintf (dump_file,
1159 " Overwritable. state %s looping %i\n",
1160 pure_const_names[w_l->state_previously_known],
1161 w_l->looping_previously_known);
1163 break;
1166 count++;
1168 /* We consider recursive cycles as possibly infinite.
1169 This might be relaxed since infinite recursion leads to stack
1170 overflow. */
1171 if (count > 1)
1172 looping = true;
1174 /* Now walk the edges and merge in callee properties. */
1175 for (e = w->callees; e; e = e->next_callee)
1177 enum availability avail;
1178 struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
1179 enum pure_const_state_e edge_state = IPA_CONST;
1180 bool edge_looping = false;
1182 if (dump_file && (dump_flags & TDF_DETAILS))
1184 fprintf (dump_file,
1185 " Call to %s/%i",
1186 cgraph_node_name (e->callee),
1187 e->callee->uid);
1189 if (avail > AVAIL_OVERWRITABLE)
1191 funct_state y_l = get_function_state (y);
1192 if (dump_file && (dump_flags & TDF_DETAILS))
1194 fprintf (dump_file,
1195 " state:%s looping:%i\n",
1196 pure_const_names[y_l->pure_const_state],
1197 y_l->looping);
1199 if (y_l->pure_const_state > IPA_PURE
1200 && cgraph_edge_cannot_lead_to_return (e))
1202 if (dump_file && (dump_flags & TDF_DETAILS))
1203 fprintf (dump_file,
1204 " Ignoring side effects"
1205 " -> pure, looping\n");
1206 edge_state = IPA_PURE;
1207 edge_looping = true;
1209 else
1211 edge_state = y_l->pure_const_state;
1212 edge_looping = y_l->looping;
1215 else if (special_builtin_state (&edge_state, &edge_looping,
1216 y->decl))
1218 else
1219 state_from_flags (&edge_state, &edge_looping,
1220 flags_from_decl_or_type (y->decl),
1221 cgraph_edge_cannot_lead_to_return (e));
1223 /* Merge the results with what we already know. */
1224 better_state (&edge_state, &edge_looping,
1225 w_l->state_previously_known,
1226 w_l->looping_previously_known);
1227 worse_state (&pure_const_state, &looping,
1228 edge_state, edge_looping);
1229 if (pure_const_state == IPA_NEITHER)
1230 break;
1232 if (pure_const_state == IPA_NEITHER)
1233 break;
1235 /* Now process the indirect call. */
1236 for (ie = w->indirect_calls; ie; ie = ie->next_callee)
1238 enum pure_const_state_e edge_state = IPA_CONST;
1239 bool edge_looping = false;
1241 if (dump_file && (dump_flags & TDF_DETAILS))
1242 fprintf (dump_file, " Indirect call");
1243 state_from_flags (&edge_state, &edge_looping,
1244 ie->indirect_info->ecf_flags,
1245 cgraph_edge_cannot_lead_to_return (ie));
1246 /* Merge the results with what we already know. */
1247 better_state (&edge_state, &edge_looping,
1248 w_l->state_previously_known,
1249 w_l->looping_previously_known);
1250 worse_state (&pure_const_state, &looping,
1251 edge_state, edge_looping);
1252 if (pure_const_state == IPA_NEITHER)
1253 break;
1255 if (pure_const_state == IPA_NEITHER)
1256 break;
1258 /* And finally all loads and stores. */
1259 for (i = 0; ipa_ref_list_reference_iterate (&w->ref_list, i, ref); i++)
1261 enum pure_const_state_e ref_state = IPA_CONST;
1262 bool ref_looping = false;
1263 switch (ref->use)
1265 case IPA_REF_LOAD:
1266 /* readonly reads are safe. */
1267 if (TREE_READONLY (ipa_ref_varpool_node (ref)->decl))
1268 break;
1269 if (dump_file && (dump_flags & TDF_DETAILS))
1270 fprintf (dump_file, " nonreadonly global var read\n");
1271 ref_state = IPA_PURE;
1272 break;
1273 case IPA_REF_STORE:
1274 if (ipa_ref_cannot_lead_to_return (ref))
1275 break;
1276 ref_state = IPA_NEITHER;
1277 if (dump_file && (dump_flags & TDF_DETAILS))
1278 fprintf (dump_file, " global var write\n");
1279 break;
1280 case IPA_REF_ADDR:
1281 break;
1283 better_state (&ref_state, &ref_looping,
1284 w_l->state_previously_known,
1285 w_l->looping_previously_known);
1286 worse_state (&pure_const_state, &looping,
1287 ref_state, ref_looping);
1288 if (pure_const_state == IPA_NEITHER)
1289 break;
1291 w_info = (struct ipa_dfs_info *) w->aux;
1292 w = w_info->next_cycle;
1294 if (dump_file && (dump_flags & TDF_DETAILS))
1295 fprintf (dump_file, "Result %s looping %i\n",
1296 pure_const_names [pure_const_state],
1297 looping);
1299 /* Copy back the region's pure_const_state which is shared by
1300 all nodes in the region. */
1301 w = node;
1302 while (w)
1304 funct_state w_l = get_function_state (w);
1305 enum pure_const_state_e this_state = pure_const_state;
1306 bool this_looping = looping;
1308 if (w_l->state_previously_known != IPA_NEITHER
1309 && this_state > w_l->state_previously_known)
1311 this_state = w_l->state_previously_known;
1312 this_looping |= w_l->looping_previously_known;
1314 if (!this_looping && self_recursive_p (w))
1315 this_looping = true;
1316 if (!w_l->looping_previously_known)
1317 this_looping = false;
1319 /* All nodes within a cycle share the same info. */
1320 w_l->pure_const_state = this_state;
1321 w_l->looping = this_looping;
1323 switch (this_state)
1325 case IPA_CONST:
1326 if (!TREE_READONLY (w->decl))
1328 warn_function_const (w->decl, !this_looping);
1329 if (dump_file)
1330 fprintf (dump_file, "Function found to be %sconst: %s\n",
1331 this_looping ? "looping " : "",
1332 cgraph_node_name (w));
1334 cgraph_set_const_flag (w, true, this_looping);
1335 break;
1337 case IPA_PURE:
1338 if (!DECL_PURE_P (w->decl))
1340 warn_function_pure (w->decl, !this_looping);
1341 if (dump_file)
1342 fprintf (dump_file, "Function found to be %spure: %s\n",
1343 this_looping ? "looping " : "",
1344 cgraph_node_name (w));
1346 cgraph_set_pure_flag (w, true, this_looping);
1347 break;
1349 default:
1350 break;
1352 w_info = (struct ipa_dfs_info *) w->aux;
1353 w = w_info->next_cycle;
1357 ipa_free_postorder_info ();
1358 free (order);
1361 /* Produce transitive closure over the callgraph and compute nothrow
1362 attributes. */
1364 static void
1365 propagate_nothrow (void)
1367 struct cgraph_node *node;
1368 struct cgraph_node *w;
1369 struct cgraph_node **order =
1370 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1371 int order_pos;
1372 int i;
1373 struct ipa_dfs_info * w_info;
1375 order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
1376 if (dump_file)
1378 dump_cgraph (dump_file);
1379 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1382 /* Propagate the local information through the call graph to produce
1383 the global information. All the nodes within a cycle will have
1384 the same info, so we collapse cycles first. Then we can do the
1385 propagation in one pass from the leaves to the roots. */
1386 for (i = 0; i < order_pos; i++ )
1388 bool can_throw = false;
1389 node = order[i];
1391 if (node->alias)
1392 continue;
1394 /* Find the worst state for any node in the cycle. */
1395 w = node;
1396 while (w)
1398 struct cgraph_edge *e, *ie;
1399 funct_state w_l = get_function_state (w);
1401 if (w_l->can_throw
1402 || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1403 can_throw = true;
1405 if (can_throw)
1406 break;
1408 for (e = w->callees; e; e = e->next_callee)
1410 enum availability avail;
1411 struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
1413 if (avail > AVAIL_OVERWRITABLE)
1415 funct_state y_l = get_function_state (y);
1417 if (can_throw)
1418 break;
1419 if (y_l->can_throw && !TREE_NOTHROW (w->decl)
1420 && e->can_throw_external)
1421 can_throw = true;
1423 else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
1424 can_throw = true;
1426 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1427 if (ie->can_throw_external)
1428 can_throw = true;
1429 w_info = (struct ipa_dfs_info *) w->aux;
1430 w = w_info->next_cycle;
1433 /* Copy back the region's nothrow state, which is shared by
1434 all nodes in the region. */
1435 w = node;
1436 while (w)
1438 funct_state w_l = get_function_state (w);
1439 if (!can_throw && !TREE_NOTHROW (w->decl))
1441 cgraph_set_nothrow_flag (w, true);
1442 if (dump_file)
1443 fprintf (dump_file, "Function found to be nothrow: %s\n",
1444 cgraph_node_name (w));
1446 else if (can_throw && !TREE_NOTHROW (w->decl))
1447 w_l->can_throw = true;
1448 w_info = (struct ipa_dfs_info *) w->aux;
1449 w = w_info->next_cycle;
1453 ipa_free_postorder_info ();
1454 free (order);
1458 /* Produce the global information by performing a transitive closure
1459 on the local information that was produced by generate_summary. */
1461 static unsigned int
1462 propagate (void)
1464 struct cgraph_node *node;
1466 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
1467 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
1468 cgraph_remove_node_removal_hook (node_removal_hook_holder);
1470 /* Propagating nothrow first makes more calls known to not lead to a
1471 return, which improves the later analysis. */
1472 propagate_nothrow ();
1473 propagate_pure_const ();
1475 /* Cleanup. */
1476 for (node = cgraph_nodes; node; node = node->next)
1477 if (has_function_state (node))
1478 free (get_function_state (node));
1479 VEC_free (funct_state, heap, funct_state_vec);
1480 finish_state ();
1481 return 0;
1484 static bool
1485 gate_pure_const (void)
1487 return (flag_ipa_pure_const
1488 /* Don't bother doing anything if the program has errors. */
1489 && !seen_error ());
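/* Usage sketch (not part of this file): the IPA pass below is named
   "pure-const" and the late local pass "local-pure-const", so their work
   can be inspected with, e.g.,

     gcc -O2 -fdump-ipa-pure-const -fdump-tree-local-pure-const foo.c

   which writes the dump_file output produced above into per-pass dump
   files. */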
1492 struct ipa_opt_pass_d pass_ipa_pure_const =
1495 IPA_PASS,
1496 "pure-const", /* name */
1497 gate_pure_const, /* gate */
1498 propagate, /* execute */
1499 NULL, /* sub */
1500 NULL, /* next */
1501 0, /* static_pass_number */
1502 TV_IPA_PURE_CONST, /* tv_id */
1503 0, /* properties_required */
1504 0, /* properties_provided */
1505 0, /* properties_destroyed */
1506 0, /* todo_flags_start */
1507 0 /* todo_flags_finish */
1509 generate_summary, /* generate_summary */
1510 pure_const_write_summary, /* write_summary */
1511 pure_const_read_summary, /* read_summary */
1512 NULL, /* write_optimization_summary */
1513 NULL, /* read_optimization_summary */
1514 NULL, /* stmt_fixup */
1515 0, /* TODOs */
1516 NULL, /* function_transform */
1517 NULL /* variable_transform */
1520 /* Return true if function should be skipped for local pure const analysis. */
1522 static bool
1523 skip_function_for_local_pure_const (struct cgraph_node *node)
1525 /* Because we do not schedule pass_fixup_cfg over the whole program after early
1526 optimizations, we must not promote functions that are called by already processed functions. */
1528 if (function_called_by_processed_nodes_p ())
1530 if (dump_file)
1531 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1532 return true;
1534 if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
1536 if (dump_file)
1537 fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
1538 return true;
1540 return false;
1543 /* Simple local pass for pure const discovery reusing the analysis from
1544 ipa_pure_const. This pass is effective when executed together with
1545 other optimization passes in the early optimization pass queue. */
1547 static unsigned int
1548 local_pure_const (void)
1550 bool changed = false;
1551 funct_state l;
1552 bool skip;
1553 struct cgraph_node *node;
1555 node = cgraph_get_node (current_function_decl);
1556 skip = skip_function_for_local_pure_const (node);
1557 if (!warn_suggest_attribute_const
1558 && !warn_suggest_attribute_pure
1559 && skip)
1560 return 0;
1562 l = analyze_function (node, false);
1564 /* Do NORETURN discovery. */
1565 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
1566 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
1568 warn_function_noreturn (cfun->decl);
1569 if (dump_file)
1570 fprintf (dump_file, "Function found to be noreturn: %s\n",
1571 lang_hooks.decl_printable_name (current_function_decl, 2));
1573 /* Update declaration and reduce profile to executed once. */
1574 TREE_THIS_VOLATILE (current_function_decl) = 1;
1575 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
1576 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
1578 changed = true;
1581 switch (l->pure_const_state)
1583 case IPA_CONST:
1584 if (!TREE_READONLY (current_function_decl))
1586 warn_function_const (current_function_decl, !l->looping);
1587 if (!skip)
1589 cgraph_set_const_flag (node, true, l->looping);
1590 changed = true;
1592 if (dump_file)
1593 fprintf (dump_file, "Function found to be %sconst: %s\n",
1594 l->looping ? "looping " : "",
1595 lang_hooks.decl_printable_name (current_function_decl,
1596 2));
1598 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1599 && !l->looping)
1601 if (!skip)
1603 cgraph_set_const_flag (node, true, false);
1604 changed = true;
1606 if (dump_file)
1607 fprintf (dump_file, "Function found to be non-looping: %s\n",
1608 lang_hooks.decl_printable_name (current_function_decl,
1609 2));
1611 break;
1613 case IPA_PURE:
1614 if (!DECL_PURE_P (current_function_decl))
1616 if (!skip)
1618 cgraph_set_pure_flag (node, true, l->looping);
1619 changed = true;
1621 warn_function_pure (current_function_decl, !l->looping);
1622 if (dump_file)
1623 fprintf (dump_file, "Function found to be %spure: %s\n",
1624 l->looping ? "looping " : "",
1625 lang_hooks.decl_printable_name (current_function_decl,
1626 2));
1628 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1629 && !l->looping)
1631 if (!skip)
1633 cgraph_set_pure_flag (node, true, false);
1634 changed = true;
1636 if (dump_file)
1637 fprintf (dump_file, "Function found to be non-looping: %s\n",
1638 lang_hooks.decl_printable_name (current_function_decl,
1639 2));
1641 break;
1643 default:
1644 break;
1646 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1648 cgraph_set_nothrow_flag (node, true);
1649 changed = true;
1650 if (dump_file)
1651 fprintf (dump_file, "Function found to be nothrow: %s\n",
1652 lang_hooks.decl_printable_name (current_function_decl,
1653 2));
1655 free (l);
1656 if (changed)
1657 return execute_fixup_cfg ();
1658 else
1659 return 0;
1662 struct gimple_opt_pass pass_local_pure_const =
1665 GIMPLE_PASS,
1666 "local-pure-const", /* name */
1667 gate_pure_const, /* gate */
1668 local_pure_const, /* execute */
1669 NULL, /* sub */
1670 NULL, /* next */
1671 0, /* static_pass_number */
1672 TV_IPA_PURE_CONST, /* tv_id */
1673 0, /* properties_required */
1674 0, /* properties_provided */
1675 0, /* properties_destroyed */
1676 0, /* todo_flags_start */
1677 0 /* todo_flags_finish */