gcc/ipa-pure-const.c
1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file marks functions as being either const (TREE_READONLY) or
23 pure (DECL_PURE_P). It can also set a variant of these that
24 are allowed to loop indefinitely (DECL_LOOPING_CONST_OR_PURE_P).
26 This must be run after inlining decisions have been made since
27 otherwise the local sets will not contain information that is
28 consistent with the post-inlined state. The global sets are not prone
29 to this problem since they are by definition transitive. */
31 /* The code in this module is called by the ipa pass manager. It
32 should be one of the later passes since its information is used by
33 the rest of the compilation. */
35 #include "config.h"
36 #include "system.h"
37 #include "coretypes.h"
38 #include "tm.h"
39 #include "tree.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "langhooks.h"
44 #include "pointer-set.h"
45 #include "ggc.h"
46 #include "ipa-utils.h"
47 #include "gimple.h"
48 #include "cgraph.h"
49 #include "output.h"
50 #include "flags.h"
51 #include "timevar.h"
52 #include "toplev.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "langhooks.h"
56 #include "target.h"
57 #include "lto-streamer.h"
58 #include "cfgloop.h"
59 #include "tree-scalar-evolution.h"
60 #include "intl.h"
61 #include "opts.h"
63 static struct pointer_set_t *visited_nodes;
65 /* Lattice values for const and pure functions. Everything starts out
66 being const, then may drop to pure and then neither depending on
67 what is found. */
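/* Illustrative examples (not from this file):
     int sq (int x) { return x * x; }        -> const (no memory access)
     int get (void) { return some_global; }  -> pure (reads global memory)
     void bump (void) { some_global++; }     -> neither (writes memory)  */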
68 enum pure_const_state_e
70 IPA_CONST,
71 IPA_PURE,
72 IPA_NEITHER
75 const char *pure_const_names[3] = {"const", "pure", "neither"};
77 /* Holder for the const_state. There is one of these per function
78 decl. */
79 struct funct_state_d
81 /* See above. */
82 enum pure_const_state_e pure_const_state;
83 /* What the user set here; we can always be sure about this. */
84 enum pure_const_state_e state_previously_known;
85 bool looping_previously_known;
87 /* True if the function could possibly loop forever. There are a
88 lot of ways that this could be determined. We are pretty
89 conservative here. While it is possible to cse pure and const
90 calls, it is not legal to have dce get rid of the call if there
91 is a possibility that the call could loop forever, since this is
92 a behavioral change. */
93 bool looping;
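/* True if the function can throw an exception that escapes to its callers. */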
95 bool can_throw;
98 typedef struct funct_state_d * funct_state;
100 /* The storage of the funct_state is abstracted because there is the
101 possibility that it may be desirable to move this to the cgraph
102 local info. */
104 /* Array, indexed by cgraph node uid, of function states. */
106 DEF_VEC_P (funct_state);
107 DEF_VEC_ALLOC_P (funct_state, heap);
108 static VEC (funct_state, heap) *funct_state_vec;
110 /* Holders of ipa cgraph hooks: */
111 static struct cgraph_node_hook_list *function_insertion_hook_holder;
112 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
113 static struct cgraph_node_hook_list *node_removal_hook_holder;
115 /* Try to guess if the function body will always be visible to the compiler
116 when compiling the call and whether the compiler will be able
117 to propagate the information by itself. */
119 static bool
120 function_always_visible_to_compiler_p (tree decl)
122 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
125 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
126 is true if the function is known to be finite. The diagnostic is
127 controlled by OPTION. WARNED_ABOUT is a pointer_set unique for
128 OPTION; this function may initialize it, and it is always returned
129 by the function. */
131 static struct pointer_set_t *
132 suggest_attribute (int option, tree decl, bool known_finite,
133 struct pointer_set_t *warned_about,
134 const char * attrib_name)
136 if (!option_enabled (option))
137 return warned_about;
138 if (TREE_THIS_VOLATILE (decl)
139 || (known_finite && function_always_visible_to_compiler_p (decl)))
140 return warned_about;
142 if (!warned_about)
143 warned_about = pointer_set_create ();
144 if (pointer_set_contains (warned_about, decl))
145 return warned_about;
146 pointer_set_insert (warned_about, decl);
147 warning_at (DECL_SOURCE_LOCATION (decl),
148 option,
149 known_finite
150 ? _("function might be candidate for attribute %<%s%>")
151 : _("function might be candidate for attribute %<%s%>"
152 " if it is known to return normally"), attrib_name);
153 return warned_about;
156 /* Emit suggestion about __attribute__((pure)) for DECL. KNOWN_FINITE
157 is true if the function is known to be finite. */
159 static void
160 warn_function_pure (tree decl, bool known_finite)
162 static struct pointer_set_t *warned_about;
164 warned_about
165 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
166 known_finite, warned_about, "pure");
169 /* Emit suggestion about __attribute__((const)) for DECL. KNOWN_FINITE
170 is true if the function is known to be finite. */
172 static void
173 warn_function_const (tree decl, bool known_finite)
175 static struct pointer_set_t *warned_about;
176 warned_about
177 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
178 known_finite, warned_about, "const");
180 /* Free the function state vector. */
182 static void
183 finish_state (void)
185 free (funct_state_vec);
189 /* Return true if we have a function state for NODE. */
191 static inline bool
192 has_function_state (struct cgraph_node *node)
194 if (!funct_state_vec
195 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
196 return false;
197 return VEC_index (funct_state, funct_state_vec, node->uid) != NULL;
200 /* Return the function state from NODE. */
202 static inline funct_state
203 get_function_state (struct cgraph_node *node)
205 static struct funct_state_d varying
206 = { IPA_NEITHER, IPA_NEITHER, true, true, true };
207 if (!funct_state_vec
208 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
209 /* We might want to put correct previously_known state into varying. */
210 return &varying;
211 return VEC_index (funct_state, funct_state_vec, node->uid);
214 /* Set the function state S for NODE. */
216 static inline void
217 set_function_state (struct cgraph_node *node, funct_state s)
219 if (!funct_state_vec
220 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
221 VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
222 VEC_replace (funct_state, funct_state_vec, node->uid, s);
225 /* Check to see if the use (or definition, when CHECKING_WRITE is true)
226 of variable T is legal in a function that is either pure or const. */
228 static inline void
229 check_decl (funct_state local,
230 tree t, bool checking_write, bool ipa)
232 /* Do not want to do anything with volatile except mark any
233 function that uses one to be not const or pure. */
234 if (TREE_THIS_VOLATILE (t))
236 local->pure_const_state = IPA_NEITHER;
237 if (dump_file)
238 fprintf (dump_file, " Volatile operand is not const/pure");
239 return;
242 /* Do not care about a local automatic that is not static. */
243 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
244 return;
246 /* If the variable has the "used" attribute, treat it as if it had
247 been touched by the devil. */
248 if (DECL_PRESERVE_P (t))
250 local->pure_const_state = IPA_NEITHER;
251 if (dump_file)
252 fprintf (dump_file, " Used static/global variable is not const/pure\n");
253 return;
256 /* In IPA mode we are not interested in checking actual loads and stores;
257 they will be processed at propagation time using ipa_ref. */
258 if (ipa)
259 return;
261 /* Since we have dealt with the locals and params cases above, if we
262 are CHECKING_WRITE, this cannot be a pure or constant
263 function. */
264 if (checking_write)
266 local->pure_const_state = IPA_NEITHER;
267 if (dump_file)
268 fprintf (dump_file, " static/global memory write is not const/pure\n");
269 return;
272 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
274 /* Readonly reads are safe. */
275 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
276 return; /* Read of a constant, do not change the function state. */
277 else
279 if (dump_file)
280 fprintf (dump_file, " global memory read is not const\n");
281 /* Just a regular read. */
282 if (local->pure_const_state == IPA_CONST)
283 local->pure_const_state = IPA_PURE;
286 else
288 /* Compilation level statics can be read if they are readonly
289 variables. */
290 if (TREE_READONLY (t))
291 return;
293 if (dump_file)
294 fprintf (dump_file, " static memory read is not const\n");
295 /* Just a regular read. */
296 if (local->pure_const_state == IPA_CONST)
297 local->pure_const_state = IPA_PURE;
302 /* Check to see if the use (or definition, when CHECKING_WRITE is true)
303 of variable T is legal in a function that is either pure or const. */
305 static inline void
306 check_op (funct_state local, tree t, bool checking_write)
308 t = get_base_address (t);
309 if (t && TREE_THIS_VOLATILE (t))
311 local->pure_const_state = IPA_NEITHER;
312 if (dump_file)
313 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
314 return;
316 else if (t
317 && INDIRECT_REF_P (t)
318 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
319 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
321 if (dump_file)
322 fprintf (dump_file, " Indirect ref to local memory is OK\n");
323 return;
325 else if (checking_write)
327 local->pure_const_state = IPA_NEITHER;
328 if (dump_file)
329 fprintf (dump_file, " Indirect ref write is not const/pure\n");
330 return;
332 else
334 if (dump_file)
335 fprintf (dump_file, " Indirect ref read is not const\n");
336 if (local->pure_const_state == IPA_CONST)
337 local->pure_const_state = IPA_PURE;
341 /* Compute the state based on ECF FLAGS and store it to STATE and LOOPING. */
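/* For example, flags of ECF_CONST | ECF_LOOPING_CONST_OR_PURE yield
   IPA_CONST with *LOOPING set, while a call that is neither const nor
   pure but cannot lead to a return is treated as looping pure.  */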
343 static void
344 state_from_flags (enum pure_const_state_e *state, bool *looping,
345 int flags, bool cannot_lead_to_return)
347 *looping = false;
348 if (flags & ECF_LOOPING_CONST_OR_PURE)
350 *looping = true;
351 if (dump_file && (dump_flags & TDF_DETAILS))
352 fprintf (dump_file, " looping");
354 if (flags & ECF_CONST)
356 *state = IPA_CONST;
357 if (dump_file && (dump_flags & TDF_DETAILS))
358 fprintf (dump_file, " const\n");
360 else if (flags & ECF_PURE)
362 *state = IPA_PURE;
363 if (dump_file && (dump_flags & TDF_DETAILS))
364 fprintf (dump_file, " pure\n");
366 else if (cannot_lead_to_return)
368 *state = IPA_PURE;
369 *looping = true;
370 if (dump_file && (dump_flags & TDF_DETAILS))
371 fprintf (dump_file, " ignoring side effects->pure looping\n");
373 else
375 if (dump_file && (dump_flags & TDF_DETAILS))
376 fprintf (dump_file, " neither\n");
377 *state = IPA_NEITHER;
378 *looping = true;
382 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
383 the better of the two variants into STATE and LOOPING.
384 Be sure to merge looping correctly. IPA_NEITHER functions
385 have looping 0 even if they don't have to return. */
387 static inline void
388 better_state (enum pure_const_state_e *state, bool *looping,
389 enum pure_const_state_e state2, bool looping2)
391 if (state2 < *state)
393 if (*state == IPA_NEITHER)
394 *looping = looping2;
395 else
396 *looping = MIN (*looping, looping2);
398 else if (state2 != IPA_NEITHER)
399 *looping = MIN (*looping, looping2);
402 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
403 the worse of the two variants into STATE and LOOPING. */
405 static inline void
406 worse_state (enum pure_const_state_e *state, bool *looping,
407 enum pure_const_state_e state2, bool looping2)
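/* The lattice is ordered IPA_CONST < IPA_PURE < IPA_NEITHER, so MAX
   picks the less optimistic of the two states.  */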
409 *state = MAX (*state, state2);
410 *looping = MAX (*looping, looping2);
413 /* Check the parameters of a function call to CALL_EXPR to see if
414 there are any references in the parameters that are not allowed for
415 pure or const functions. Also check to see if this is either an
416 indirect call, a call outside the compilation unit, or has special
417 attributes that may also affect the purity. CALL is the call
418 statement for the entire call expression. */
420 static void
421 check_call (funct_state local, gimple call, bool ipa)
423 int flags = gimple_call_flags (call);
424 tree callee_t = gimple_call_fndecl (call);
425 bool possibly_throws = stmt_could_throw_p (call);
426 bool possibly_throws_externally = (possibly_throws
427 && stmt_can_throw_external (call));
429 if (possibly_throws)
431 unsigned int i;
432 for (i = 0; i < gimple_num_ops (call); i++)
433 if (gimple_op (call, i)
434 && tree_could_throw_p (gimple_op (call, i)))
436 if (possibly_throws && cfun->can_throw_non_call_exceptions)
438 if (dump_file)
439 fprintf (dump_file, " operand can throw; looping\n");
440 local->looping = true;
442 if (possibly_throws_externally)
444 if (dump_file)
445 fprintf (dump_file, " operand can throw externally\n");
446 local->can_throw = true;
451 /* The const and pure flags are set by a variety of places in the
452 compiler (including here). If someone has already set the flags
453 for the callee, (such as for some of the builtins) we will use
454 them, otherwise we will compute our own information.
456 Const and pure functions have less clobber effects than other
457 functions so we process these first. Otherwise if it is a call
458 outside the compilation unit or an indirect call we punt. This
459 leaves local calls which will be processed by following the call
460 graph. */
461 if (callee_t)
463 /* BUILT_IN_RETURN is really just a return statement. */
464 if (gimple_call_builtin_p (call, BUILT_IN_RETURN))
465 return;
466 /* When bad things happen to bad functions, they cannot be const
467 or pure. */
468 if (setjmp_call_p (callee_t))
470 if (dump_file)
471 fprintf (dump_file, " setjmp is not const/pure\n");
472 local->looping = true;
473 local->pure_const_state = IPA_NEITHER;
476 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
477 switch (DECL_FUNCTION_CODE (callee_t))
479 case BUILT_IN_LONGJMP:
480 case BUILT_IN_NONLOCAL_GOTO:
481 if (dump_file)
482 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
483 local->pure_const_state = IPA_NEITHER;
484 local->looping = true;
485 break;
486 default:
487 break;
491 /* When not in IPA mode, we can still handle self recursion. */
492 if (!ipa && callee_t == current_function_decl)
494 if (dump_file)
495 fprintf (dump_file, " Recursive call can loop.\n");
496 local->looping = true;
498 /* Either the callee is unknown or we are doing local analysis.
499 Look to see if there are any bits available for the callee (such as by
500 declaration or because it is builtin) and process solely on the basis of
501 those bits. */
502 else if (!ipa)
504 enum pure_const_state_e call_state;
505 bool call_looping;
506 if (possibly_throws && cfun->can_throw_non_call_exceptions)
508 if (dump_file)
509 fprintf (dump_file, " can throw; looping\n");
510 local->looping = true;
512 if (possibly_throws_externally)
514 if (dump_file)
516 fprintf (dump_file, " can throw externally to lp %i\n",
517 lookup_stmt_eh_lp (call));
518 if (callee_t)
519 fprintf (dump_file, " callee:%s\n",
520 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
522 local->can_throw = true;
524 if (dump_file && (dump_flags & TDF_DETAILS))
525 fprintf (dump_file, " checking flags for call:");
526 state_from_flags (&call_state, &call_looping, flags,
527 ((flags & (ECF_NORETURN | ECF_NOTHROW))
528 == (ECF_NORETURN | ECF_NOTHROW))
529 || (!flag_exceptions && (flags & ECF_NORETURN)));
530 worse_state (&local->pure_const_state, &local->looping,
531 call_state, call_looping);
533 /* Direct functions calls are handled by IPA propagation. */
536 /* Wrapper around check_decl for loads in local mode. */
538 static bool
539 check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
541 if (DECL_P (op))
542 check_decl ((funct_state)data, op, false, false);
543 else
544 check_op ((funct_state)data, op, false);
545 return false;
548 /* Wrapper around check_decl for stores in local mode. */
550 static bool
551 check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
553 if (DECL_P (op))
554 check_decl ((funct_state)data, op, true, false);
555 else
556 check_op ((funct_state)data, op, true);
557 return false;
560 /* Wrapper around check_decl for loads in ipa mode. */
562 static bool
563 check_ipa_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
565 if (DECL_P (op))
566 check_decl ((funct_state)data, op, false, true);
567 else
568 check_op ((funct_state)data, op, false);
569 return false;
572 /* Wrapper around check_decl for stores in ipa mode. */
574 static bool
575 check_ipa_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
577 if (DECL_P (op))
578 check_decl ((funct_state)data, op, true, true);
579 else
580 check_op ((funct_state)data, op, true);
581 return false;
584 /* Look at the statement pointed to by GSIP and figure out what interesting side
585 effects it has. */
586 static void
587 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
589 gimple stmt = gsi_stmt (*gsip);
590 unsigned int i = 0;
592 if (is_gimple_debug (stmt))
593 return;
595 if (dump_file)
597 fprintf (dump_file, " scanning: ");
598 print_gimple_stmt (dump_file, stmt, 0, 0);
601 /* Look for loads and stores. */
602 walk_stmt_load_store_ops (stmt, local,
603 ipa ? check_ipa_load : check_load,
604 ipa ? check_ipa_store : check_store);
606 if (gimple_code (stmt) != GIMPLE_CALL
607 && stmt_could_throw_p (stmt))
609 if (cfun->can_throw_non_call_exceptions)
611 if (dump_file)
612 fprintf (dump_file, " can throw; looping");
613 local->looping = true;
615 if (stmt_can_throw_external (stmt))
617 if (dump_file)
618 fprintf (dump_file, " can throw externally");
619 local->can_throw = true;
622 switch (gimple_code (stmt))
624 case GIMPLE_CALL:
625 check_call (local, stmt, ipa);
626 break;
627 case GIMPLE_LABEL:
628 if (DECL_NONLOCAL (gimple_label_label (stmt)))
629 /* Target of long jump. */
631 if (dump_file)
632 fprintf (dump_file, " nonlocal label is not const/pure");
633 local->pure_const_state = IPA_NEITHER;
635 break;
636 case GIMPLE_ASM:
637 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
639 tree op = gimple_asm_clobber_op (stmt, i);
640 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
642 if (dump_file)
643 fprintf (dump_file, " memory asm clobber is not const/pure");
644 /* Abandon all hope, ye who enter here. */
645 local->pure_const_state = IPA_NEITHER;
648 if (gimple_asm_volatile_p (stmt))
650 if (dump_file)
651 fprintf (dump_file, " volatile is not const/pure");
652 /* Abandon all hope, ye who enter here. */
653 local->pure_const_state = IPA_NEITHER;
654 local->looping = true;
656 return;
657 default:
658 break;
663 /* This is the main routine for finding the reference patterns for
664 global variables within a function FN. */
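/* The returned funct_state is freshly allocated with XCNEW; ownership
   passes to the caller.  */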
666 static funct_state
667 analyze_function (struct cgraph_node *fn, bool ipa)
669 tree decl = fn->decl;
670 tree old_decl = current_function_decl;
671 funct_state l;
672 basic_block this_block;
674 l = XCNEW (struct funct_state_d);
675 l->pure_const_state = IPA_CONST;
676 l->state_previously_known = IPA_NEITHER;
677 l->looping_previously_known = true;
678 l->looping = false;
679 l->can_throw = false;
681 if (dump_file)
683 fprintf (dump_file, "\n\n local analysis of %s\n ",
684 cgraph_node_name (fn));
687 push_cfun (DECL_STRUCT_FUNCTION (decl));
688 current_function_decl = decl;
690 FOR_EACH_BB (this_block)
692 gimple_stmt_iterator gsi;
693 struct walk_stmt_info wi;
695 memset (&wi, 0, sizeof(wi));
696 for (gsi = gsi_start_bb (this_block);
697 !gsi_end_p (gsi);
698 gsi_next (&gsi))
700 check_stmt (&gsi, l, ipa);
701 if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
702 goto end;
706 end:
707 if (l->pure_const_state != IPA_NEITHER)
709 /* Const functions cannot have back edges (an
710 indication of possible infinite loop side
711 effect). */
712 if (mark_dfs_back_edges ())
714 /* Preheaders are needed for SCEV to work.
715 Simple latches and recorded exits improve chances that the loop
716 will be proved finite in testcases such as loop-15.c and loop-24.c. */
717 loop_optimizer_init (LOOPS_NORMAL
718 | LOOPS_HAVE_RECORDED_EXITS);
719 if (dump_file && (dump_flags & TDF_DETAILS))
720 flow_loops_dump (dump_file, NULL, 0);
721 if (mark_irreducible_loops ())
723 if (dump_file)
724 fprintf (dump_file, " has irreducible loops\n");
725 l->looping = true;
727 else
729 loop_iterator li;
730 struct loop *loop;
731 scev_initialize ();
732 FOR_EACH_LOOP (li, loop, 0)
733 if (!finite_loop_p (loop))
735 if (dump_file)
736 fprintf (dump_file, " can not prove finiteness of loop %i\n", loop->num);
737 l->looping = true;
738 break;
740 scev_finalize ();
742 loop_optimizer_finalize ();
746 if (dump_file && (dump_flags & TDF_DETAILS))
747 fprintf (dump_file, " checking previously known:");
748 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
749 flags_from_decl_or_type (fn->decl),
750 cgraph_node_cannot_return (fn));
752 better_state (&l->pure_const_state, &l->looping,
753 l->state_previously_known,
754 l->looping_previously_known);
755 if (TREE_NOTHROW (decl))
756 l->can_throw = false;
758 pop_cfun ();
759 current_function_decl = old_decl;
760 if (dump_file)
762 if (l->looping)
763 fprintf (dump_file, "Function is locally looping.\n");
764 if (l->can_throw)
765 fprintf (dump_file, "Function is locally throwing.\n");
766 if (l->pure_const_state == IPA_CONST)
767 fprintf (dump_file, "Function is locally const.\n");
768 if (l->pure_const_state == IPA_PURE)
769 fprintf (dump_file, "Function is locally pure.\n");
771 return l;
774 /* Called when a new function is inserted into the callgraph late. */
775 static void
776 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
778 if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
779 return;
780 /* There are some shared nodes, in particular the initializers on
781 static declarations. We do not need to scan them more than once
782 since all we would be interested in are the addressof
783 operations. */
784 visited_nodes = pointer_set_create ();
785 if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
786 set_function_state (node, analyze_function (node, true));
787 pointer_set_destroy (visited_nodes);
788 visited_nodes = NULL;
791 /* Called when a new clone is inserted into the callgraph late. */
793 static void
794 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
795 void *data ATTRIBUTE_UNUSED)
797 if (has_function_state (src))
799 funct_state l = XNEW (struct funct_state_d);
800 gcc_assert (!has_function_state (dst));
801 memcpy (l, get_function_state (src), sizeof (*l));
802 set_function_state (dst, l);
806 /* Called when a node is removed from the callgraph late. */
808 static void
809 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
811 if (has_function_state (node))
813 free (get_function_state (node));
814 set_function_state (node, NULL);
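/* Register the callgraph hooks that keep the function state consistent
   when nodes are inserted, duplicated or removed.  Done only once.  */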
819 static void
820 register_hooks (void)
822 static bool init_p = false;
824 if (init_p)
825 return;
827 init_p = true;
829 node_removal_hook_holder =
830 cgraph_add_node_removal_hook (&remove_node_data, NULL);
831 node_duplication_hook_holder =
832 cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
833 function_insertion_hook_holder =
834 cgraph_add_function_insertion_hook (&add_new_function, NULL);
838 /* Analyze each function in the cgraph to see if it is locally PURE or
839 CONST. */
841 static void
842 generate_summary (void)
844 struct cgraph_node *node;
846 register_hooks ();
848 /* There are some shared nodes, in particular the initializers on
849 static declarations. We do not need to scan them more than once
850 since all we would be interested in are the addressof
851 operations. */
852 visited_nodes = pointer_set_create ();
854 /* Process all of the functions.
856 We process AVAIL_OVERWRITABLE functions. We cannot use the results
857 by default, but the info can be used at LTO with -fwhole-program or
858 when the function got cloned and the clone is AVAILABLE. */
860 for (node = cgraph_nodes; node; node = node->next)
861 if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
862 set_function_state (node, analyze_function (node, true));
864 pointer_set_destroy (visited_nodes);
865 visited_nodes = NULL;
869 /* Serialize the ipa info for lto. */
871 static void
872 pure_const_write_summary (cgraph_node_set set,
873 varpool_node_set vset ATTRIBUTE_UNUSED)
875 struct cgraph_node *node;
876 struct lto_simple_output_block *ob
877 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
878 unsigned int count = 0;
879 cgraph_node_set_iterator csi;
881 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
883 node = csi_node (csi);
884 if (node->analyzed && has_function_state (node))
885 count++;
888 lto_output_uleb128_stream (ob->main_stream, count);
890 /* Process all of the functions. */
891 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
893 node = csi_node (csi);
894 if (node->analyzed && has_function_state (node))
896 struct bitpack_d *bp;
897 funct_state fs;
898 int node_ref;
899 lto_cgraph_encoder_t encoder;
901 fs = get_function_state (node);
903 encoder = ob->decl_state->cgraph_node_encoder;
904 node_ref = lto_cgraph_encoder_encode (encoder, node);
905 lto_output_uleb128_stream (ob->main_stream, node_ref);
907 /* Note that flags will need to be read in the opposite
908 order as we are pushing the bitflags into FLAGS. */
909 bp = bitpack_create ();
910 bp_pack_value (bp, fs->pure_const_state, 2);
911 bp_pack_value (bp, fs->state_previously_known, 2);
912 bp_pack_value (bp, fs->looping_previously_known, 1);
913 bp_pack_value (bp, fs->looping, 1);
914 bp_pack_value (bp, fs->can_throw, 1);
915 lto_output_bitpack (ob->main_stream, bp);
916 bitpack_delete (bp);
920 lto_destroy_simple_output_block (ob);
924 /* Deserialize the ipa info for lto. */
926 static void
927 pure_const_read_summary (void)
929 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
930 struct lto_file_decl_data *file_data;
931 unsigned int j = 0;
933 register_hooks ();
934 while ((file_data = file_data_vec[j++]))
936 const char *data;
937 size_t len;
938 struct lto_input_block *ib
939 = lto_create_simple_input_block (file_data,
940 LTO_section_ipa_pure_const,
941 &data, &len);
942 if (ib)
944 unsigned int i;
945 unsigned int count = lto_input_uleb128 (ib);
947 for (i = 0; i < count; i++)
949 unsigned int index;
950 struct cgraph_node *node;
951 struct bitpack_d *bp;
952 funct_state fs;
953 lto_cgraph_encoder_t encoder;
955 fs = XCNEW (struct funct_state_d);
956 index = lto_input_uleb128 (ib);
957 encoder = file_data->cgraph_node_encoder;
958 node = lto_cgraph_encoder_deref (encoder, index);
959 set_function_state (node, fs);
961 /* Note that the flags must be read in the opposite
962 order in which they were written (the bitflags were
963 pushed into FLAGS). */
964 bp = lto_input_bitpack (ib);
965 fs->pure_const_state
966 = (enum pure_const_state_e) bp_unpack_value (bp, 2);
967 fs->state_previously_known
968 = (enum pure_const_state_e) bp_unpack_value (bp, 2);
969 fs->looping_previously_known = bp_unpack_value (bp, 1);
970 fs->looping = bp_unpack_value (bp, 1);
971 fs->can_throw = bp_unpack_value (bp, 1);
972 if (dump_file)
974 int flags = flags_from_decl_or_type (node->decl);
975 fprintf (dump_file, "Read info for %s/%i ",
976 cgraph_node_name (node),
977 node->uid);
978 if (flags & ECF_CONST)
979 fprintf (dump_file, " const");
980 if (flags & ECF_PURE)
981 fprintf (dump_file, " pure");
982 if (flags & ECF_NOTHROW)
983 fprintf (dump_file, " nothrow");
984 fprintf (dump_file, "\n pure const state: %s\n",
985 pure_const_names[fs->pure_const_state]);
986 fprintf (dump_file, " previously known state: %s\n",
987 pure_const_names[fs->state_previously_known]);
988 if (fs->looping)
989 fprintf (dump_file," function is locally looping\n");
990 if (fs->looping_previously_known)
991 fprintf (dump_file," function is previously known looping\n");
992 if (fs->can_throw)
993 fprintf (dump_file," function is locally throwing\n");
995 bitpack_delete (bp);
998 lto_destroy_simple_input_block (file_data,
999 LTO_section_ipa_pure_const,
1000 ib, data, len);
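/* Edge predicate for the nothrow propagation below: edges that cannot
   throw externally are ignored when reducing the callgraph.  */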
1006 static bool
1007 ignore_edge (struct cgraph_edge *e)
1009 return (!e->can_throw_external);
1012 /* Return true if NODE is a self-recursive function. */
1014 static bool
1015 self_recursive_p (struct cgraph_node *node)
1017 struct cgraph_edge *e;
1018 for (e = node->callees; e; e = e->next_callee)
1019 if (e->callee == node)
1020 return true;
1021 return false;
1024 /* Produce transitive closure over the callgraph and compute pure/const
1025 attributes. */
1027 static void
1028 propagate_pure_const (void)
1030 struct cgraph_node *node;
1031 struct cgraph_node *w;
1032 struct cgraph_node **order =
1033 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1034 int order_pos;
1035 int i;
1036 struct ipa_dfs_info * w_info;
1038 order_pos = ipa_utils_reduced_inorder (order, true, false, NULL);
1039 if (dump_file)
1041 dump_cgraph (dump_file);
1042 ipa_utils_print_order(dump_file, "reduced", order, order_pos);
1045 /* Propagate the local information through the call graph to produce
1046 the global information. All the nodes within a cycle will have
1047 the same info so we collapse cycles first. Then we can do the
1048 propagation in one pass from the leaves to the roots. */
1049 for (i = 0; i < order_pos; i++ )
1051 enum pure_const_state_e pure_const_state = IPA_CONST;
1052 bool looping = false;
1053 int count = 0;
1054 node = order[i];
1056 if (dump_file && (dump_flags & TDF_DETAILS))
1057 fprintf (dump_file, "Starting cycle\n");
1059 /* Find the worst state for any node in the cycle. */
1060 w = node;
1061 while (w && pure_const_state != IPA_NEITHER)
1063 struct cgraph_edge *e;
1064 struct cgraph_edge *ie;
1065 int i;
1066 struct ipa_ref *ref;
1068 funct_state w_l = get_function_state (w);
1069 if (dump_file && (dump_flags & TDF_DETAILS))
1070 fprintf (dump_file, " Visiting %s/%i state:%s looping %i\n",
1071 cgraph_node_name (w),
1072 w->uid,
1073 pure_const_names[w_l->pure_const_state],
1074 w_l->looping);
1076 /* First merge in function body properties. */
1077 worse_state (&pure_const_state, &looping,
1078 w_l->pure_const_state, w_l->looping);
1079 if (pure_const_state == IPA_NEITHER)
1080 break;
1082 /* For overwritable nodes we can not assume anything. */
1083 if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1085 worse_state (&pure_const_state, &looping,
1086 w_l->state_previously_known,
1087 w_l->looping_previously_known);
1088 if (dump_file && (dump_flags & TDF_DETAILS))
1090 fprintf (dump_file,
1091 " Overwritable. state %s looping %i\n",
1092 pure_const_names[w_l->state_previously_known],
1093 w_l->looping_previously_known);
1095 break;
1098 count++;
1100 /* We consider recursive cycles as possibly infinite.
1101 This might be relaxed since infinite recursion leads to stack
1102 overflow. */
1103 if (count > 1)
1104 looping = true;
1106 /* Now walk the edges and merge in callee properties. */
1107 for (e = w->callees; e; e = e->next_callee)
1109 struct cgraph_node *y = e->callee;
1110 enum pure_const_state_e edge_state = IPA_CONST;
1111 bool edge_looping = false;
1113 if (dump_file && (dump_flags & TDF_DETAILS))
1115 fprintf (dump_file,
1116 " Call to %s/%i",
1117 cgraph_node_name (e->callee),
1118 e->callee->uid);
1120 if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
1122 funct_state y_l = get_function_state (y);
1123 if (dump_file && (dump_flags & TDF_DETAILS))
1125 fprintf (dump_file,
1126 " state:%s looping:%i\n",
1127 pure_const_names[y_l->pure_const_state],
1128 y_l->looping);
1130 if (y_l->pure_const_state > IPA_PURE
1131 && cgraph_edge_cannot_lead_to_return (e))
1133 if (dump_file && (dump_flags & TDF_DETAILS))
1134 fprintf (dump_file,
1135 " Ignoring side effects"
1136 " -> pure, looping\n");
1137 edge_state = IPA_PURE;
1138 edge_looping = true;
1140 else
1142 edge_state = y_l->pure_const_state;
1143 edge_looping = y_l->looping;
1146 else
1147 state_from_flags (&edge_state, &edge_looping,
1148 flags_from_decl_or_type (y->decl),
1149 cgraph_edge_cannot_lead_to_return (e));
1151 /* Merge the results with what we already know. */
1152 better_state (&edge_state, &edge_looping,
1153 w_l->state_previously_known,
1154 w_l->looping_previously_known);
1155 worse_state (&pure_const_state, &looping,
1156 edge_state, edge_looping);
1157 if (pure_const_state == IPA_NEITHER)
1158 break;
1160 if (pure_const_state == IPA_NEITHER)
1161 break;
1163 /* Now process the indirect call. */
1164 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1166 enum pure_const_state_e edge_state = IPA_CONST;
1167 bool edge_looping = false;
1169 if (dump_file && (dump_flags & TDF_DETAILS))
1170 fprintf (dump_file, " Indirect call");
1171 state_from_flags (&edge_state, &edge_looping,
1172 ie->indirect_info->ecf_flags,
1173 cgraph_edge_cannot_lead_to_return (ie));
1174 /* Merge the results with what we already know. */
1175 better_state (&edge_state, &edge_looping,
1176 w_l->state_previously_known,
1177 w_l->looping_previously_known);
1178 worse_state (&pure_const_state, &looping,
1179 edge_state, edge_looping);
1180 if (pure_const_state == IPA_NEITHER)
1181 break;
1183 if (pure_const_state == IPA_NEITHER)
1184 break;
1186 /* And finally all loads and stores. */
1187 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
1189 enum pure_const_state_e ref_state = IPA_CONST;
1190 bool ref_looping = false;
1191 switch (ref->use)
1193 case IPA_REF_LOAD:
1194 /* readonly reads are safe. */
1195 if (TREE_READONLY (ipa_ref_varpool_node (ref)->decl))
1196 break;
1197 if (dump_file && (dump_flags & TDF_DETAILS))
1198 fprintf (dump_file, " nonreadonly global var read\n");
1199 ref_state = IPA_PURE;
1200 break;
1201 case IPA_REF_STORE:
1202 if (ipa_ref_cannot_lead_to_return (ref))
1203 break;
1204 ref_state = IPA_NEITHER;
1205 if (dump_file && (dump_flags & TDF_DETAILS))
1206 fprintf (dump_file, " global var write\n");
1207 break;
1208 case IPA_REF_ADDR:
1209 break;
1211 better_state (&ref_state, &ref_looping,
1212 w_l->state_previously_known,
1213 w_l->looping_previously_known);
1214 worse_state (&pure_const_state, &looping,
1215 ref_state, ref_looping);
1216 if (pure_const_state == IPA_NEITHER)
1217 break;
1219 w_info = (struct ipa_dfs_info *) w->aux;
1220 w = w_info->next_cycle;
1222 if (dump_file && (dump_flags & TDF_DETAILS))
1223 fprintf (dump_file, "Result %s looping %i\n",
1224 pure_const_names [pure_const_state],
1225 looping);
1227 /* Copy back the region's pure_const_state which is shared by
1228 all nodes in the region. */
1229 w = node;
1230 while (w)
1232 funct_state w_l = get_function_state (w);
1233 enum pure_const_state_e this_state = pure_const_state;
1234 bool this_looping = looping;
1236 if (w_l->state_previously_known != IPA_NEITHER
1237 && this_state > w_l->state_previously_known)
1239 this_state = w_l->state_previously_known;
1240 this_looping |= w_l->looping_previously_known;
1242 if (!this_looping && self_recursive_p (w))
1243 this_looping = true;
1244 if (!w_l->looping_previously_known)
1245 this_looping = false;
1247 /* All nodes within a cycle share the same info. */
1248 w_l->pure_const_state = this_state;
1249 w_l->looping = this_looping;
1251 switch (this_state)
1253 case IPA_CONST:
1254 if (!TREE_READONLY (w->decl))
1256 warn_function_const (w->decl, !this_looping);
1257 if (dump_file)
1258 fprintf (dump_file, "Function found to be %sconst: %s\n",
1259 this_looping ? "looping " : "",
1260 cgraph_node_name (w));
1262 cgraph_set_readonly_flag (w, true);
1263 cgraph_set_looping_const_or_pure_flag (w, this_looping);
1264 break;
1266 case IPA_PURE:
1267 if (!DECL_PURE_P (w->decl))
1269 warn_function_pure (w->decl, !this_looping);
1270 if (dump_file)
1271 fprintf (dump_file, "Function found to be %spure: %s\n",
1272 this_looping ? "looping " : "",
1273 cgraph_node_name (w));
1275 cgraph_set_pure_flag (w, true);
1276 cgraph_set_looping_const_or_pure_flag (w, this_looping);
1277 break;
1279 default:
1280 break;
1282 w_info = (struct ipa_dfs_info *) w->aux;
1283 w = w_info->next_cycle;
1287 /* Cleanup. */
1288 for (node = cgraph_nodes; node; node = node->next)
1290 /* Get rid of the aux information. */
1291 if (node->aux)
1293 w_info = (struct ipa_dfs_info *) node->aux;
1294 free (node->aux);
1295 node->aux = NULL;
1299 free (order);
1302 /* Produce transitive closure over the callgraph and compute nothrow
1303 attributes. */
1305 static void
1306 propagate_nothrow (void)
1308 struct cgraph_node *node;
1309 struct cgraph_node *w;
1310 struct cgraph_node **order =
1311 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1312 int order_pos;
1313 int i;
1314 struct ipa_dfs_info * w_info;
1316 order_pos = ipa_utils_reduced_inorder (order, true, false, ignore_edge);
1317 if (dump_file)
1319 dump_cgraph (dump_file);
1320 ipa_utils_print_order(dump_file, "reduced for nothrow", order, order_pos);
1323 /* Propagate the local information through the call graph to produce
1324 the global information. All the nodes within a cycle will have
1325 the same info so we collapse cycles first. Then we can do the
1326 propagation in one pass from the leaves to the roots. */
1327 for (i = 0; i < order_pos; i++ )
1329 bool can_throw = false;
1330 node = order[i];
1332 /* Find the worst state for any node in the cycle. */
1333 w = node;
1334 while (w)
1336 struct cgraph_edge *e, *ie;
1337 funct_state w_l = get_function_state (w);
1339 if (w_l->can_throw
1340 || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1341 can_throw = true;
1343 if (can_throw)
1344 break;
1346 for (e = w->callees; e; e = e->next_callee)
1348 struct cgraph_node *y = e->callee;
1350 if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
1352 funct_state y_l = get_function_state (y);
1354 if (can_throw)
1355 break;
1356 if (y_l->can_throw && !TREE_NOTHROW (w->decl)
1357 && e->can_throw_external)
1358 can_throw = true;
1360 else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
1361 can_throw = true;
1363 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1364 if (ie->can_throw_external)
1365 can_throw = true;
1366 w_info = (struct ipa_dfs_info *) w->aux;
1367 w = w_info->next_cycle;
1370 /* Copy back the region's pure_const_state which is shared by
1371 all nodes in the region. */
1372 w = node;
1373 while (w)
1375 funct_state w_l = get_function_state (w);
1376 if (!can_throw && !TREE_NOTHROW (w->decl))
1378 struct cgraph_edge *e;
1379 cgraph_set_nothrow_flag (w, true);
1380 for (e = w->callers; e; e = e->next_caller)
1381 e->can_throw_external = false;
1382 if (dump_file)
1383 fprintf (dump_file, "Function found to be nothrow: %s\n",
1384 cgraph_node_name (w));
1386 else if (can_throw && !TREE_NOTHROW (w->decl))
1387 w_l->can_throw = true;
1388 w_info = (struct ipa_dfs_info *) w->aux;
1389 w = w_info->next_cycle;
1393 /* Cleanup. */
1394 for (node = cgraph_nodes; node; node = node->next)
1396 /* Get rid of the aux information. */
1397 if (node->aux)
1399 w_info = (struct ipa_dfs_info *) node->aux;
1400 free (node->aux);
1401 node->aux = NULL;
1405 free (order);
1409 /* Produce the global information by performing a transitive closure
1410 on the local information that was produced by generate_summary. */
1412 static unsigned int
1413 propagate (void)
1415 struct cgraph_node *node;
1417 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
1418 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
1419 cgraph_remove_node_removal_hook (node_removal_hook_holder);
1421 /* Nothrow makes more functions not lead to a return, which improves
1422 later analysis. */
1423 propagate_nothrow ();
1424 propagate_pure_const ();
1426 /* Cleanup. */
1427 for (node = cgraph_nodes; node; node = node->next)
1428 if (has_function_state (node))
1429 free (get_function_state (node));
1430 VEC_free (funct_state, heap, funct_state_vec);
1431 finish_state ();
1432 return 0;
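/* Gate shared by the IPA and local passes: run only when -fipa-pure-const
   is enabled and no errors have been reported.  */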
1435 static bool
1436 gate_pure_const (void)
1438 return (flag_ipa_pure_const
1439 /* Don't bother doing anything if the program has errors. */
1440 && !seen_error ());
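/* Pass descriptor for the interprocedural pure/const propagation pass.  */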
1443 struct ipa_opt_pass_d pass_ipa_pure_const =
1446 IPA_PASS,
1447 "pure-const", /* name */
1448 gate_pure_const, /* gate */
1449 propagate, /* execute */
1450 NULL, /* sub */
1451 NULL, /* next */
1452 0, /* static_pass_number */
1453 TV_IPA_PURE_CONST, /* tv_id */
1454 0, /* properties_required */
1455 0, /* properties_provided */
1456 0, /* properties_destroyed */
1457 0, /* todo_flags_start */
1458 0 /* todo_flags_finish */
1460 generate_summary, /* generate_summary */
1461 pure_const_write_summary, /* write_summary */
1462 pure_const_read_summary, /* read_summary */
1463 NULL, /* write_optimization_summary */
1464 NULL, /* read_optimization_summary */
1465 NULL, /* stmt_fixup */
1466 0, /* TODOs */
1467 NULL, /* function_transform */
1468 NULL /* variable_transform */
1471 /* Return true if function should be skipped for local pure const analysis. */
1473 static bool
1474 skip_function_for_local_pure_const (struct cgraph_node *node)
1476 /* Because we do not schedule pass_fixup_cfg over the whole program after early optimizations,
1477 we must not promote functions that are called by already processed functions. */
1479 if (function_called_by_processed_nodes_p ())
1481 if (dump_file)
1482 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1483 return true;
1485 if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
1487 if (dump_file)
1488 fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
1489 return true;
1491 return false;
1494 /* Simple local pass for pure const discovery reusing the analysis from
1495 ipa_pure_const. This pass is effective when executed together with
1496 other optimization passes in early optimization pass queue. */
1498 static unsigned int
1499 local_pure_const (void)
1501 bool changed = false;
1502 funct_state l;
1503 bool skip;
1504 struct cgraph_node *node;
1506 node = cgraph_node (current_function_decl);
1507 skip = skip_function_for_local_pure_const (node);
1508 if (!warn_suggest_attribute_const
1509 && !warn_suggest_attribute_pure
1510 && skip)
1511 return 0;
1513 /* First do NORETURN discovery. */
1514 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
1515 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
1517 if (warn_missing_noreturn
1518 && !lang_hooks.missing_noreturn_ok_p (cfun->decl))
1519 warning_at (DECL_SOURCE_LOCATION (cfun->decl), OPT_Wmissing_noreturn,
1520 "function might be possible candidate "
1521 "for attribute %<noreturn%>");
1522 if (dump_file)
1523 fprintf (dump_file, "Function found to be noreturn: %s\n",
1524 lang_hooks.decl_printable_name (current_function_decl, 2));
1526 /* Update declaration and reduce profile to executed once. */
1527 TREE_THIS_VOLATILE (current_function_decl) = 1;
1528 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
1529 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
1531 changed = true;
1533 l = analyze_function (node, false);
1535 switch (l->pure_const_state)
1537 case IPA_CONST:
1538 if (!TREE_READONLY (current_function_decl))
1540 warn_function_const (current_function_decl, !l->looping);
1541 if (!skip)
1543 cgraph_set_readonly_flag (node, true);
1544 cgraph_set_looping_const_or_pure_flag (node, l->looping);
1545 changed = true;
1547 if (dump_file)
1548 fprintf (dump_file, "Function found to be %sconst: %s\n",
1549 l->looping ? "looping " : "",
1550 lang_hooks.decl_printable_name (current_function_decl,
1551 2));
1553 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1554 && !l->looping)
1556 if (!skip)
1558 cgraph_set_looping_const_or_pure_flag (node, false);
1559 changed = true;
1561 if (dump_file)
1562 fprintf (dump_file, "Function found to be non-looping: %s\n",
1563 lang_hooks.decl_printable_name (current_function_decl,
1564 2));
1566 break;
1568 case IPA_PURE:
1569 if (!DECL_PURE_P (current_function_decl))
1571 if (!skip)
1573 cgraph_set_pure_flag (node, true);
1574 cgraph_set_looping_const_or_pure_flag (node, l->looping);
1575 changed = true;
1577 warn_function_pure (current_function_decl, !l->looping);
1578 if (dump_file)
1579 fprintf (dump_file, "Function found to be %spure: %s\n",
1580 l->looping ? "looping " : "",
1581 lang_hooks.decl_printable_name (current_function_decl,
1582 2));
1584 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1585 && !l->looping)
1587 if (!skip)
1589 cgraph_set_looping_const_or_pure_flag (node, false);
1590 changed = true;
1592 if (dump_file)
1593 fprintf (dump_file, "Function found to be non-looping: %s\n",
1594 lang_hooks.decl_printable_name (current_function_decl,
1595 2));
1597 break;
1599 default:
1600 break;
1602 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1604 struct cgraph_edge *e;
1606 cgraph_set_nothrow_flag (node, true);
1607 for (e = node->callers; e; e = e->next_caller)
1608 e->can_throw_external = false;
1609 changed = true;
1610 if (dump_file)
1611 fprintf (dump_file, "Function found to be nothrow: %s\n",
1612 lang_hooks.decl_printable_name (current_function_decl,
1613 2));
1615 if (l)
1616 free (l);
1617 if (changed)
1618 return execute_fixup_cfg ();
1619 else
1620 return 0;
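/* Pass descriptor for the local (single-function) pure/const pass.  */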
1623 struct gimple_opt_pass pass_local_pure_const =
1626 GIMPLE_PASS,
1627 "local-pure-const", /* name */
1628 gate_pure_const, /* gate */
1629 local_pure_const, /* execute */
1630 NULL, /* sub */
1631 NULL, /* next */
1632 0, /* static_pass_number */
1633 TV_IPA_PURE_CONST, /* tv_id */
1634 0, /* properties_required */
1635 0, /* properties_provided */
1636 0, /* properties_destroyed */
1637 0, /* todo_flags_start */
1638 0 /* todo_flags_finish */