* tree.c (free_lang_data_in_decl): Also set target/optimization flags
[official-gcc.git] / gcc / ipa-pure-const.c
blobba76275a696792ecd89b0346fe66755a2c455cc1
1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "tree.h"
40 #include "gimple.h"
41 #include "tree-pass.h"
42 #include "tree-streamer.h"
43 #include "cgraph.h"
44 #include "diagnostic.h"
45 #include "calls.h"
46 #include "cfganal.h"
47 #include "tree-eh.h"
48 #include "gimple-iterator.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-ssa-loop-niter.h"
52 #include "langhooks.h"
53 #include "ipa-utils.h"
54 #include "gimple-pretty-print.h"
55 #include "cfgloop.h"
56 #include "tree-scalar-evolution.h"
57 #include "intl.h"
58 #include "opts.h"
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Human-readable names for the lattice values, indexed by
   pure_const_state_e; used in dump output.  */
const char *pure_const_names[3] = {"const", "pure", "neither"};
72 /* Holder for the const_state. There is one of these per function
73 decl. */
74 struct funct_state_d
76 /* See above. */
77 enum pure_const_state_e pure_const_state;
78 /* What user set here; we can be always sure about this. */
79 enum pure_const_state_e state_previously_known;
80 bool looping_previously_known;
82 /* True if the function could possibly infinite loop. There are a
83 lot of ways that this could be determined. We are pretty
84 conservative here. While it is possible to cse pure and const
85 calls, it is not legal to have dce get rid of the call if there
86 is a possibility that the call could infinite loop since this is
87 a behavioral change. */
88 bool looping;
90 bool can_throw;
92 /* If function can call free, munmap or otherwise make previously
93 non-trapping memory accesses trapping. */
94 bool can_free;
97 /* State used when we know nothing about function. */
98 static struct funct_state_d varying_state
99 = { IPA_NEITHER, IPA_NEITHER, true, true, true, true };
102 typedef struct funct_state_d * funct_state;
104 /* The storage of the funct_state is abstracted because there is the
105 possibility that it may be desirable to move this to the cgraph
106 local info. */
108 /* Array, indexed by cgraph node uid, of function states. */
110 static vec<funct_state> funct_state_vec;
112 static bool gate_pure_const (void);
114 namespace {
116 const pass_data pass_data_ipa_pure_const =
118 IPA_PASS, /* type */
119 "pure-const", /* name */
120 OPTGROUP_NONE, /* optinfo_flags */
121 TV_IPA_PURE_CONST, /* tv_id */
122 0, /* properties_required */
123 0, /* properties_provided */
124 0, /* properties_destroyed */
125 0, /* todo_flags_start */
126 0, /* todo_flags_finish */
129 class pass_ipa_pure_const : public ipa_opt_pass_d
131 public:
132 pass_ipa_pure_const(gcc::context *ctxt);
134 /* opt_pass methods: */
135 bool gate (function *) { return gate_pure_const (); }
136 unsigned int execute (function *fun);
138 void register_hooks (void);
140 private:
141 bool init_p;
143 /* Holders of ipa cgraph hooks: */
144 struct cgraph_node_hook_list *function_insertion_hook_holder;
145 struct cgraph_2node_hook_list *node_duplication_hook_holder;
146 struct cgraph_node_hook_list *node_removal_hook_holder;
148 }; // class pass_ipa_pure_const
150 } // anon namespace
152 /* Try to guess if function body will always be visible to compiler
153 when compiling the call and whether compiler will be able
154 to propagate the information by itself. */
156 static bool
157 function_always_visible_to_compiler_p (tree decl)
159 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
162 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
163 is true if the function is known to be finite. The diagnostic is
164 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
165 OPTION, this function may initialize it and it is always returned
166 by the function. */
168 static hash_set<tree> *
169 suggest_attribute (int option, tree decl, bool known_finite,
170 hash_set<tree> *warned_about,
171 const char * attrib_name)
173 if (!option_enabled (option, &global_options))
174 return warned_about;
175 if (TREE_THIS_VOLATILE (decl)
176 || (known_finite && function_always_visible_to_compiler_p (decl)))
177 return warned_about;
179 if (!warned_about)
180 warned_about = new hash_set<tree>;
181 if (warned_about->contains (decl))
182 return warned_about;
183 warned_about->add (decl);
184 warning_at (DECL_SOURCE_LOCATION (decl),
185 option,
186 known_finite
187 ? _("function might be candidate for attribute %<%s%>")
188 : _("function might be candidate for attribute %<%s%>"
189 " if it is known to return normally"), attrib_name);
190 return warned_about;
193 /* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
194 is true if the function is known to be finite. */
196 static void
197 warn_function_pure (tree decl, bool known_finite)
199 static hash_set<tree> *warned_about;
201 warned_about
202 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
203 known_finite, warned_about, "pure");
206 /* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
207 is true if the function is known to be finite. */
209 static void
210 warn_function_const (tree decl, bool known_finite)
212 static hash_set<tree> *warned_about;
213 warned_about
214 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
215 known_finite, warned_about, "const");
218 static void
219 warn_function_noreturn (tree decl)
221 static hash_set<tree> *warned_about;
222 if (!lang_hooks.missing_noreturn_ok_p (decl)
223 && targetm.warn_func_return (decl))
224 warned_about
225 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
226 true, warned_about, "noreturn");
229 /* Return true if we have a function state for NODE. */
231 static inline bool
232 has_function_state (struct cgraph_node *node)
234 if (!funct_state_vec.exists ()
235 || funct_state_vec.length () <= (unsigned int)node->uid)
236 return false;
237 return funct_state_vec[node->uid] != NULL;
240 /* Return the function state from NODE. */
242 static inline funct_state
243 get_function_state (struct cgraph_node *node)
245 if (!funct_state_vec.exists ()
246 || funct_state_vec.length () <= (unsigned int)node->uid
247 || !funct_state_vec[node->uid])
248 /* We might want to put correct previously_known state into varying. */
249 return &varying_state;
250 return funct_state_vec[node->uid];
253 /* Set the function state S for NODE. */
255 static inline void
256 set_function_state (struct cgraph_node *node, funct_state s)
258 if (!funct_state_vec.exists ()
259 || funct_state_vec.length () <= (unsigned int)node->uid)
260 funct_state_vec.safe_grow_cleared (node->uid + 1);
261 funct_state_vec[node->uid] = s;
264 /* Check to see if the use (or definition when CHECKING_WRITE is true)
265 variable T is legal in a function that is either pure or const. */
267 static inline void
268 check_decl (funct_state local,
269 tree t, bool checking_write, bool ipa)
271 /* Do not want to do anything with volatile except mark any
272 function that uses one to be not const or pure. */
273 if (TREE_THIS_VOLATILE (t))
275 local->pure_const_state = IPA_NEITHER;
276 if (dump_file)
277 fprintf (dump_file, " Volatile operand is not const/pure");
278 return;
281 /* Do not care about a local automatic that is not static. */
282 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
283 return;
285 /* If the variable has the "used" attribute, treat it as if it had a
286 been touched by the devil. */
287 if (DECL_PRESERVE_P (t))
289 local->pure_const_state = IPA_NEITHER;
290 if (dump_file)
291 fprintf (dump_file, " Used static/global variable is not const/pure\n");
292 return;
295 /* In IPA mode we are not interested in checking actual loads and stores;
296 they will be processed at propagation time using ipa_ref. */
297 if (ipa)
298 return;
300 /* Since we have dealt with the locals and params cases above, if we
301 are CHECKING_WRITE, this cannot be a pure or constant
302 function. */
303 if (checking_write)
305 local->pure_const_state = IPA_NEITHER;
306 if (dump_file)
307 fprintf (dump_file, " static/global memory write is not const/pure\n");
308 return;
311 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
313 /* Readonly reads are safe. */
314 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
315 return; /* Read of a constant, do not change the function state. */
316 else
318 if (dump_file)
319 fprintf (dump_file, " global memory read is not const\n");
320 /* Just a regular read. */
321 if (local->pure_const_state == IPA_CONST)
322 local->pure_const_state = IPA_PURE;
325 else
327 /* Compilation level statics can be read if they are readonly
328 variables. */
329 if (TREE_READONLY (t))
330 return;
332 if (dump_file)
333 fprintf (dump_file, " static memory read is not const\n");
334 /* Just a regular read. */
335 if (local->pure_const_state == IPA_CONST)
336 local->pure_const_state = IPA_PURE;
341 /* Check to see if the use (or definition when CHECKING_WRITE is true)
342 variable T is legal in a function that is either pure or const. */
344 static inline void
345 check_op (funct_state local, tree t, bool checking_write)
347 t = get_base_address (t);
348 if (t && TREE_THIS_VOLATILE (t))
350 local->pure_const_state = IPA_NEITHER;
351 if (dump_file)
352 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
353 return;
355 else if (t
356 && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
357 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
358 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
360 if (dump_file)
361 fprintf (dump_file, " Indirect ref to local memory is OK\n");
362 return;
364 else if (checking_write)
366 local->pure_const_state = IPA_NEITHER;
367 if (dump_file)
368 fprintf (dump_file, " Indirect ref write is not const/pure\n");
369 return;
371 else
373 if (dump_file)
374 fprintf (dump_file, " Indirect ref read is not const\n");
375 if (local->pure_const_state == IPA_CONST)
376 local->pure_const_state = IPA_PURE;
380 /* compute state based on ECF FLAGS and store to STATE and LOOPING. */
382 static void
383 state_from_flags (enum pure_const_state_e *state, bool *looping,
384 int flags, bool cannot_lead_to_return)
386 *looping = false;
387 if (flags & ECF_LOOPING_CONST_OR_PURE)
389 *looping = true;
390 if (dump_file && (dump_flags & TDF_DETAILS))
391 fprintf (dump_file, " looping");
393 if (flags & ECF_CONST)
395 *state = IPA_CONST;
396 if (dump_file && (dump_flags & TDF_DETAILS))
397 fprintf (dump_file, " const\n");
399 else if (flags & ECF_PURE)
401 *state = IPA_PURE;
402 if (dump_file && (dump_flags & TDF_DETAILS))
403 fprintf (dump_file, " pure\n");
405 else if (cannot_lead_to_return)
407 *state = IPA_PURE;
408 *looping = true;
409 if (dump_file && (dump_flags & TDF_DETAILS))
410 fprintf (dump_file, " ignoring side effects->pure looping\n");
412 else
414 if (dump_file && (dump_flags & TDF_DETAILS))
415 fprintf (dump_file, " neither\n");
416 *state = IPA_NEITHER;
417 *looping = true;
421 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
422 into STATE and LOOPING better of the two variants.
423 Be sure to merge looping correctly. IPA_NEITHER functions
424 have looping 0 even if they don't have to return. */
426 static inline void
427 better_state (enum pure_const_state_e *state, bool *looping,
428 enum pure_const_state_e state2, bool looping2)
430 if (state2 < *state)
432 if (*state == IPA_NEITHER)
433 *looping = looping2;
434 else
435 *looping = MIN (*looping, looping2);
436 *state = state2;
438 else if (state2 != IPA_NEITHER)
439 *looping = MIN (*looping, looping2);
442 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
443 into STATE and LOOPING worse of the two variants.
444 N is the actual node called. */
446 static inline void
447 worse_state (enum pure_const_state_e *state, bool *looping,
448 enum pure_const_state_e state2, bool looping2,
449 struct symtab_node *from,
450 struct symtab_node *to)
452 /* Consider function:
454 bool a(int *p)
456 return *p==*p;
459 During early optimization we will turn this into:
461 bool a(int *p)
463 return true;
466 Now if this function will be detected as CONST however when interposed it
467 may end up being just pure. We always must assume the worst scenario here.
469 if (*state == IPA_CONST && state2 == IPA_CONST
470 && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
472 if (dump_file && (dump_flags & TDF_DETAILS))
473 fprintf (dump_file, "Dropping state to PURE because call to %s may not "
474 "bind to current def.\n", to->name ());
475 state2 = IPA_PURE;
477 *state = MAX (*state, state2);
478 *looping = MAX (*looping, looping2);
481 /* Recognize special cases of builtins that are by themselves not pure or const
482 but function using them is. */
483 static bool
484 special_builtin_state (enum pure_const_state_e *state, bool *looping,
485 tree callee)
487 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
488 switch (DECL_FUNCTION_CODE (callee))
490 case BUILT_IN_RETURN:
491 case BUILT_IN_UNREACHABLE:
492 case BUILT_IN_ALLOCA:
493 case BUILT_IN_ALLOCA_WITH_ALIGN:
494 case BUILT_IN_STACK_SAVE:
495 case BUILT_IN_STACK_RESTORE:
496 case BUILT_IN_EH_POINTER:
497 case BUILT_IN_EH_FILTER:
498 case BUILT_IN_UNWIND_RESUME:
499 case BUILT_IN_CXA_END_CLEANUP:
500 case BUILT_IN_EH_COPY_VALUES:
501 case BUILT_IN_FRAME_ADDRESS:
502 case BUILT_IN_APPLY:
503 case BUILT_IN_APPLY_ARGS:
504 *looping = false;
505 *state = IPA_CONST;
506 return true;
507 case BUILT_IN_PREFETCH:
508 *looping = true;
509 *state = IPA_CONST;
510 return true;
511 default:
512 break;
514 return false;
517 /* Check the parameters of a function call to CALL_EXPR to see if
518 there are any references in the parameters that are not allowed for
519 pure or const functions. Also check to see if this is either an
520 indirect call, a call outside the compilation unit, or has special
521 attributes that may also effect the purity. The CALL_EXPR node for
522 the entire call expression. */
524 static void
525 check_call (funct_state local, gcall *call, bool ipa)
527 int flags = gimple_call_flags (call);
528 tree callee_t = gimple_call_fndecl (call);
529 bool possibly_throws = stmt_could_throw_p (call);
530 bool possibly_throws_externally = (possibly_throws
531 && stmt_can_throw_external (call));
533 if (possibly_throws)
535 unsigned int i;
536 for (i = 0; i < gimple_num_ops (call); i++)
537 if (gimple_op (call, i)
538 && tree_could_throw_p (gimple_op (call, i)))
540 if (possibly_throws && cfun->can_throw_non_call_exceptions)
542 if (dump_file)
543 fprintf (dump_file, " operand can throw; looping\n");
544 local->looping = true;
546 if (possibly_throws_externally)
548 if (dump_file)
549 fprintf (dump_file, " operand can throw externally\n");
550 local->can_throw = true;
555 /* The const and pure flags are set by a variety of places in the
556 compiler (including here). If someone has already set the flags
557 for the callee, (such as for some of the builtins) we will use
558 them, otherwise we will compute our own information.
560 Const and pure functions have less clobber effects than other
561 functions so we process these first. Otherwise if it is a call
562 outside the compilation unit or an indirect call we punt. This
563 leaves local calls which will be processed by following the call
564 graph. */
565 if (callee_t)
567 enum pure_const_state_e call_state;
568 bool call_looping;
570 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
571 && !nonfreeing_call_p (call))
572 local->can_free = true;
574 if (special_builtin_state (&call_state, &call_looping, callee_t))
576 worse_state (&local->pure_const_state, &local->looping,
577 call_state, call_looping,
578 NULL, NULL);
579 return;
581 /* When bad things happen to bad functions, they cannot be const
582 or pure. */
583 if (setjmp_call_p (callee_t))
585 if (dump_file)
586 fprintf (dump_file, " setjmp is not const/pure\n");
587 local->looping = true;
588 local->pure_const_state = IPA_NEITHER;
591 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
592 switch (DECL_FUNCTION_CODE (callee_t))
594 case BUILT_IN_LONGJMP:
595 case BUILT_IN_NONLOCAL_GOTO:
596 if (dump_file)
597 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
598 local->pure_const_state = IPA_NEITHER;
599 local->looping = true;
600 break;
601 default:
602 break;
605 else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
606 local->can_free = true;
608 /* When not in IPA mode, we can still handle self recursion. */
609 if (!ipa && callee_t
610 && recursive_call_p (current_function_decl, callee_t))
612 if (dump_file)
613 fprintf (dump_file, " Recursive call can loop.\n");
614 local->looping = true;
616 /* Either callee is unknown or we are doing local analysis.
617 Look to see if there are any bits available for the callee (such as by
618 declaration or because it is builtin) and process solely on the basis of
619 those bits. Handle internal calls always, those calls don't have
620 corresponding cgraph edges and thus aren't processed during
621 the propagation. */
622 else if (!ipa || gimple_call_internal_p (call))
624 enum pure_const_state_e call_state;
625 bool call_looping;
626 if (possibly_throws && cfun->can_throw_non_call_exceptions)
628 if (dump_file)
629 fprintf (dump_file, " can throw; looping\n");
630 local->looping = true;
632 if (possibly_throws_externally)
634 if (dump_file)
636 fprintf (dump_file, " can throw externally to lp %i\n",
637 lookup_stmt_eh_lp (call));
638 if (callee_t)
639 fprintf (dump_file, " callee:%s\n",
640 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
642 local->can_throw = true;
644 if (dump_file && (dump_flags & TDF_DETAILS))
645 fprintf (dump_file, " checking flags for call:");
646 state_from_flags (&call_state, &call_looping, flags,
647 ((flags & (ECF_NORETURN | ECF_NOTHROW))
648 == (ECF_NORETURN | ECF_NOTHROW))
649 || (!flag_exceptions && (flags & ECF_NORETURN)));
650 worse_state (&local->pure_const_state, &local->looping,
651 call_state, call_looping, NULL, NULL);
653 /* Direct functions calls are handled by IPA propagation. */
656 /* Wrapper around check_decl for loads in local more. */
658 static bool
659 check_load (gimple *, tree op, tree, void *data)
661 if (DECL_P (op))
662 check_decl ((funct_state)data, op, false, false);
663 else
664 check_op ((funct_state)data, op, false);
665 return false;
668 /* Wrapper around check_decl for stores in local more. */
670 static bool
671 check_store (gimple *, tree op, tree, void *data)
673 if (DECL_P (op))
674 check_decl ((funct_state)data, op, true, false);
675 else
676 check_op ((funct_state)data, op, true);
677 return false;
680 /* Wrapper around check_decl for loads in ipa mode. */
682 static bool
683 check_ipa_load (gimple *, tree op, tree, void *data)
685 if (DECL_P (op))
686 check_decl ((funct_state)data, op, false, true);
687 else
688 check_op ((funct_state)data, op, false);
689 return false;
692 /* Wrapper around check_decl for stores in ipa mode. */
694 static bool
695 check_ipa_store (gimple *, tree op, tree, void *data)
697 if (DECL_P (op))
698 check_decl ((funct_state)data, op, true, true);
699 else
700 check_op ((funct_state)data, op, true);
701 return false;
704 /* Look into pointer pointed to by GSIP and figure out what interesting side
705 effects it has. */
706 static void
707 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
709 gimple *stmt = gsi_stmt (*gsip);
711 if (is_gimple_debug (stmt))
712 return;
714 /* Do consider clobber as side effects before IPA, so we rather inline
715 C++ destructors and keep clobber semantics than eliminate them.
717 TODO: We may get smarter during early optimizations on these and let
718 functions containing only clobbers to be optimized more. This is a common
719 case of C++ destructors. */
721 if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
722 return;
724 if (dump_file)
726 fprintf (dump_file, " scanning: ");
727 print_gimple_stmt (dump_file, stmt, 0, 0);
730 if (gimple_has_volatile_ops (stmt)
731 && !gimple_clobber_p (stmt))
733 local->pure_const_state = IPA_NEITHER;
734 if (dump_file)
735 fprintf (dump_file, " Volatile stmt is not const/pure\n");
738 /* Look for loads and stores. */
739 walk_stmt_load_store_ops (stmt, local,
740 ipa ? check_ipa_load : check_load,
741 ipa ? check_ipa_store : check_store);
743 if (gimple_code (stmt) != GIMPLE_CALL
744 && stmt_could_throw_p (stmt))
746 if (cfun->can_throw_non_call_exceptions)
748 if (dump_file)
749 fprintf (dump_file, " can throw; looping\n");
750 local->looping = true;
752 if (stmt_can_throw_external (stmt))
754 if (dump_file)
755 fprintf (dump_file, " can throw externally\n");
756 local->can_throw = true;
758 else
759 if (dump_file)
760 fprintf (dump_file, " can throw\n");
762 switch (gimple_code (stmt))
764 case GIMPLE_CALL:
765 check_call (local, as_a <gcall *> (stmt), ipa);
766 break;
767 case GIMPLE_LABEL:
768 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
769 /* Target of long jump. */
771 if (dump_file)
772 fprintf (dump_file, " nonlocal label is not const/pure\n");
773 local->pure_const_state = IPA_NEITHER;
775 break;
776 case GIMPLE_ASM:
777 if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
779 if (dump_file)
780 fprintf (dump_file, " memory asm clobber is not const/pure\n");
781 /* Abandon all hope, ye who enter here. */
782 local->pure_const_state = IPA_NEITHER;
783 local->can_free = true;
785 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
787 if (dump_file)
788 fprintf (dump_file, " volatile is not const/pure\n");
789 /* Abandon all hope, ye who enter here. */
790 local->pure_const_state = IPA_NEITHER;
791 local->looping = true;
792 local->can_free = true;
794 return;
795 default:
796 break;
801 /* This is the main routine for finding the reference patterns for
802 global variables within a function FN. */
804 static funct_state
805 analyze_function (struct cgraph_node *fn, bool ipa)
807 tree decl = fn->decl;
808 funct_state l;
809 basic_block this_block;
811 l = XCNEW (struct funct_state_d);
812 l->pure_const_state = IPA_CONST;
813 l->state_previously_known = IPA_NEITHER;
814 l->looping_previously_known = true;
815 l->looping = false;
816 l->can_throw = false;
817 l->can_free = false;
818 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
819 flags_from_decl_or_type (fn->decl),
820 fn->cannot_return_p ());
822 if (fn->thunk.thunk_p || fn->alias)
824 /* Thunk gets propagated through, so nothing interesting happens. */
825 gcc_assert (ipa);
826 if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
827 l->pure_const_state = IPA_NEITHER;
828 return l;
831 if (dump_file)
833 fprintf (dump_file, "\n\n local analysis of %s\n ",
834 fn->name ());
837 push_cfun (DECL_STRUCT_FUNCTION (decl));
839 FOR_EACH_BB_FN (this_block, cfun)
841 gimple_stmt_iterator gsi;
842 struct walk_stmt_info wi;
844 memset (&wi, 0, sizeof (wi));
845 for (gsi = gsi_start_bb (this_block);
846 !gsi_end_p (gsi);
847 gsi_next (&gsi))
849 check_stmt (&gsi, l, ipa);
850 if (l->pure_const_state == IPA_NEITHER
851 && l->looping
852 && l->can_throw
853 && l->can_free)
854 goto end;
858 end:
859 if (l->pure_const_state != IPA_NEITHER)
861 /* Const functions cannot have back edges (an
862 indication of possible infinite loop side
863 effect. */
864 if (mark_dfs_back_edges ())
866 /* Preheaders are needed for SCEV to work.
867 Simple latches and recorded exits improve chances that loop will
868 proved to be finite in testcases such as in loop-15.c
869 and loop-24.c */
870 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
871 | LOOPS_HAVE_SIMPLE_LATCHES
872 | LOOPS_HAVE_RECORDED_EXITS);
873 if (dump_file && (dump_flags & TDF_DETAILS))
874 flow_loops_dump (dump_file, NULL, 0);
875 if (mark_irreducible_loops ())
877 if (dump_file)
878 fprintf (dump_file, " has irreducible loops\n");
879 l->looping = true;
881 else
883 struct loop *loop;
884 scev_initialize ();
885 FOR_EACH_LOOP (loop, 0)
886 if (!finite_loop_p (loop))
888 if (dump_file)
889 fprintf (dump_file, " can not prove finiteness of "
890 "loop %i\n", loop->num);
891 l->looping =true;
892 break;
894 scev_finalize ();
896 loop_optimizer_finalize ();
900 if (dump_file && (dump_flags & TDF_DETAILS))
901 fprintf (dump_file, " checking previously known:");
903 better_state (&l->pure_const_state, &l->looping,
904 l->state_previously_known,
905 l->looping_previously_known);
906 if (TREE_NOTHROW (decl))
907 l->can_throw = false;
909 pop_cfun ();
910 if (dump_file)
912 if (l->looping)
913 fprintf (dump_file, "Function is locally looping.\n");
914 if (l->can_throw)
915 fprintf (dump_file, "Function is locally throwing.\n");
916 if (l->pure_const_state == IPA_CONST)
917 fprintf (dump_file, "Function is locally const.\n");
918 if (l->pure_const_state == IPA_PURE)
919 fprintf (dump_file, "Function is locally pure.\n");
920 if (l->can_free)
921 fprintf (dump_file, "Function can locally free.\n");
923 return l;
926 /* Called when new function is inserted to callgraph late. */
927 static void
928 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
930 /* There are some shared nodes, in particular the initializers on
931 static declarations. We do not need to scan them more than once
932 since all we would be interested in are the addressof
933 operations. */
934 if (opt_for_fn (node->decl, flag_ipa_pure_const))
935 set_function_state (node, analyze_function (node, true));
938 /* Called when new clone is inserted to callgraph late. */
940 static void
941 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
942 void *data ATTRIBUTE_UNUSED)
944 if (has_function_state (src))
946 funct_state l = XNEW (struct funct_state_d);
947 gcc_assert (!has_function_state (dst));
948 memcpy (l, get_function_state (src), sizeof (*l));
949 set_function_state (dst, l);
953 /* Called when new clone is inserted to callgraph late. */
955 static void
956 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
958 if (has_function_state (node))
960 funct_state l = get_function_state (node);
961 if (l != &varying_state)
962 free (l);
963 set_function_state (node, NULL);
968 void
969 pass_ipa_pure_const::
970 register_hooks (void)
972 if (init_p)
973 return;
975 init_p = true;
977 node_removal_hook_holder =
978 symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
979 node_duplication_hook_holder =
980 symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
981 function_insertion_hook_holder =
982 symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
986 /* Analyze each function in the cgraph to see if it is locally PURE or
987 CONST. */
989 static void
990 pure_const_generate_summary (void)
992 struct cgraph_node *node;
994 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
995 pass->register_hooks ();
997 /* Process all of the functions.
999 We process AVAIL_INTERPOSABLE functions. We can not use the results
1000 by default, but the info can be used at LTO with -fwhole-program or
1001 when function got cloned and the clone is AVAILABLE. */
1003 FOR_EACH_DEFINED_FUNCTION (node)
1004 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1005 set_function_state (node, analyze_function (node, true));
1009 /* Serialize the ipa info for lto. */
1011 static void
1012 pure_const_write_summary (void)
1014 struct cgraph_node *node;
1015 struct lto_simple_output_block *ob
1016 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1017 unsigned int count = 0;
1018 lto_symtab_encoder_iterator lsei;
1019 lto_symtab_encoder_t encoder;
1021 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1023 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1024 lsei_next_function_in_partition (&lsei))
1026 node = lsei_cgraph_node (lsei);
1027 if (node->definition && has_function_state (node))
1028 count++;
1031 streamer_write_uhwi_stream (ob->main_stream, count);
1033 /* Process all of the functions. */
1034 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1035 lsei_next_function_in_partition (&lsei))
1037 node = lsei_cgraph_node (lsei);
1038 if (node->definition && has_function_state (node))
1040 struct bitpack_d bp;
1041 funct_state fs;
1042 int node_ref;
1043 lto_symtab_encoder_t encoder;
1045 fs = get_function_state (node);
1047 encoder = ob->decl_state->symtab_node_encoder;
1048 node_ref = lto_symtab_encoder_encode (encoder, node);
1049 streamer_write_uhwi_stream (ob->main_stream, node_ref);
1051 /* Note that flags will need to be read in the opposite
1052 order as we are pushing the bitflags into FLAGS. */
1053 bp = bitpack_create (ob->main_stream);
1054 bp_pack_value (&bp, fs->pure_const_state, 2);
1055 bp_pack_value (&bp, fs->state_previously_known, 2);
1056 bp_pack_value (&bp, fs->looping_previously_known, 1);
1057 bp_pack_value (&bp, fs->looping, 1);
1058 bp_pack_value (&bp, fs->can_throw, 1);
1059 bp_pack_value (&bp, fs->can_free, 1);
1060 streamer_write_bitpack (&bp);
1064 lto_destroy_simple_output_block (ob);
/* Deserialize the ipa info for lto.  Reads one LTO_section_ipa_pure_const
   section per input file and attaches a freshly allocated funct_state to
   every cgraph node recorded in it.  The bit unpacking below must mirror,
   in reverse, the packing order used by pure_const_write_summary.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  /* Make sure the cgraph hooks that keep the summaries in sync are
     installed before any state is attached to nodes.  */
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      /* A file may legitimately contain no pure/const section.  */
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      fs = XCNEW (struct funct_state_d);
	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));
	      set_function_state (node, fs);

	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s/%i ",
			   node->name (),
			   node->order);
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file,"  function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file,"  function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file,"  function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file,"  function can locally free\n");
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
1154 /* We only propagate across edges that can throw externally and their callee
1155 is not interposable. */
1157 static bool
1158 ignore_edge_for_nothrow (struct cgraph_edge *e)
1160 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1161 return true;
1163 enum availability avail;
1164 cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
1165 e->caller);
1166 if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
1167 return true;
1168 return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1169 && !e->callee->binds_to_current_def_p (e->caller);
1172 /* Return true if NODE is self recursive function.
1173 Indirectly recursive functions appears as non-trivial strongly
1174 connected components, so we need to care about self recursion
1175 only. */
1177 static bool
1178 self_recursive_p (struct cgraph_node *node)
1180 struct cgraph_edge *e;
1181 for (e = node->callees; e; e = e->next_callee)
1182 if (e->callee->function_symbol () == node)
1183 return true;
1184 return false;
1187 /* Return true if N is cdtor that is not const or pure. In this case we may
1188 need to remove unreachable function if it is marked const/pure. */
1190 static bool
1191 cdtor_p (cgraph_node *n, void *)
1193 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1194 return !TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl);
1195 return false;
1198 /* We only propagate across edges with non-interposable callee. */
1200 static bool
1201 ignore_edge_for_pure_const (struct cgraph_edge *e)
1203 enum availability avail;
1204 e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1205 return (avail <= AVAIL_INTERPOSABLE);
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  Works on the strongly connected components produced by
   ipa_reduced_postorder: every node in a cycle receives the same (worst)
   state.  Returns true when some const/pure cdtor was discovered, in
   which case the caller should schedule TODO_remove_functions.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;
  bool has_cdtor;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  Once IPA_NEITHER
	 is reached nothing can make it better, so every loop below bails
	 out early on that state.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = get_function_state (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s/%i state:%s looping %i\n",
		     w->name (),
		     w->order,
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.
	     We are safe to pass NULL as FROM and TO because we will take care
	     of possible interposition when walking callees.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping,
		       NULL, NULL);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
	       e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file,
			   "    Call to %s/%i",
			   e->callee->name (),
			   e->callee->order);
		}
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  /* Non-interposable callee: use its analyzed state.  */
		  funct_state y_l = get_function_state (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      /* Side effects of a call that never returns to the
			 caller are not observable, so demote only to
			 looping pure.  */
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					      y->decl))
		;
	      else
		/* Interposable callee: only its declared flags can be
		   trusted.  */
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, e->caller, e->callee);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* Now process the indirect call.  */
	  for (ie = w->indirect_calls;
	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Indirect call");
	      state_from_flags (&edge_state, &edge_looping,
				ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref)
	       && pure_const_state != IPA_NEITHER; i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		case IPA_REF_CHKP:
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = get_function_state (w);

	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = get_function_state (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->name ());

	  /* Previously known state (e.g. from declared attributes) may be
	     better than the propagated result; prefer it.  */
	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->global.inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		/* Turning constructor or destructor to non-looping const/pure
		   enables us to possibly remove the function completely.  */
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_const_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_pure_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  As with propagate_pure_const, all nodes of a strongly
   connected component share one result; a single throwing member makes
   the whole cycle throwing.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = get_function_state (w);

	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it can
		     not be interposed.
		     When callee is compiled with non-call exceptions we also
		     must check that the declaration is bound to current
		     body as other semantically equivalent body may still
		     throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (get_function_state (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      /* Indirect calls may throw unless their ECF flags say
		 otherwise.  */
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->global.inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.
   Returns TODO_remove_functions when a cdtor became removable.  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  struct cgraph_node *node;
  bool remove_p;

  /* Summaries are complete; the maintenance hooks are no longer needed.  */
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);

  /* Nothrow makes more function to not lead to return and improve
     later analysis.  */
  propagate_nothrow ();
  remove_p = propagate_pure_const ();

  /* Cleanup.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();
  return remove_p ? TODO_remove_functions : 0;
}
1684 static bool
1685 gate_pure_const (void)
1687 return flag_ipa_pure_const || in_lto_p;
/* Construct the IPA pure/const pass, wiring the summary generation and
   LTO streaming callbacks into the pass manager.  The cgraph hook
   holders start NULL; register_hooks installs them lazily (guarded by
   init_p).  */

pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p(false),
  function_insertion_hook_holder(NULL),
  node_duplication_hook_holder(NULL),
  node_removal_hook_holder(NULL)
{
}
1708 ipa_opt_pass_d *
1709 make_pass_ipa_pure_const (gcc::context *ctxt)
1711 return new pass_ipa_pure_const (ctxt);
1714 /* Return true if function should be skipped for local pure const analysis. */
1716 static bool
1717 skip_function_for_local_pure_const (struct cgraph_node *node)
1719 /* Because we do not schedule pass_fixup_cfg over whole program after early
1720 optimizations we must not promote functions that are called by already
1721 processed functions. */
1723 if (function_called_by_processed_nodes_p ())
1725 if (dump_file)
1726 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1727 return true;
1729 /* Save some work and do not analyze functions which are interposable and
1730 do not have any non-interposable aliases. */
1731 if (node->get_availability () <= AVAIL_INTERPOSABLE
1732 && !node->has_aliases_p ())
1734 if (dump_file)
1735 fprintf (dump_file,
1736 "Function is interposable; not analyzing.\n");
1737 return true;
1739 return false;
/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */

namespace {

/* Pass metadata for the per-function pure/const discovery pass.  */
const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* GIMPLE pass wrapper running the local pure/const analysis on a single
   function; cloneable so it can appear several times in the pipeline.  */
class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
/* Analyze the current function locally: discover NORETURN, const, pure
   and nothrow properties and update the declaration where allowed.
   Returns execute_fixup_cfg's TODO flags when anything changed.  */

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);
  /* Even when the declaration may not be updated, keep analyzing if the
     user asked for -Wsuggest-attribute warnings.  */
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }
  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
1881 } // anon namespace
1883 gimple_opt_pass *
1884 make_pass_local_pure_const (gcc::context *ctxt)
1886 return new pass_local_pure_const (ctxt);
/* Emit noreturn warnings.  */

namespace {

/* Pass metadata for the -Wsuggest-attribute=noreturn warning pass;
   requires a CFG so exit-block predecessors can be inspected.  */
const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Warning-only pass: suggest the noreturn attribute for functions whose
   exit block is unreachable.  Never modifies the IL.  */
class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      /* No predecessor of the exit block means control never returns.  */
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn
1925 } // anon namespace
1927 gimple_opt_pass *
1928 make_pass_warn_function_noreturn (gcc::context *ctxt)
1930 return new pass_warn_function_noreturn (ctxt);
/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */

namespace {

/* Pass metadata for the local nothrow discovery pass.  */
const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* GIMPLE pass wrapper for local nothrow discovery; cloneable so it can
   be scheduled at several points of the pipeline.  */
class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow
1966 unsigned int
1967 pass_nothrow::execute (function *)
1969 struct cgraph_node *node;
1970 basic_block this_block;
1972 if (TREE_NOTHROW (current_function_decl))
1973 return 0;
1975 node = cgraph_node::get (current_function_decl);
1977 /* We run during lowering, we can not really use availability yet. */
1978 if (cgraph_node::get (current_function_decl)->get_availability ()
1979 <= AVAIL_INTERPOSABLE)
1981 if (dump_file)
1982 fprintf (dump_file, "Function is interposable;"
1983 " not analyzing.\n");
1984 return true;
1987 FOR_EACH_BB_FN (this_block, cfun)
1989 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
1990 !gsi_end_p (gsi);
1991 gsi_next (&gsi))
1992 if (stmt_can_throw_external (gsi_stmt (gsi)))
1994 if (is_gimple_call (gsi_stmt (gsi)))
1996 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
1997 if (callee_t && recursive_call_p (current_function_decl,
1998 callee_t))
1999 continue;
2002 if (dump_file)
2004 fprintf (dump_file, "Statement can throw: ");
2005 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2007 return 0;
2011 node->set_nothrow_flag (true);
2013 bool cfg_changed = false;
2014 if (self_recursive_p (node))
2015 FOR_EACH_BB_FN (this_block, cfun)
2016 if (gimple *g = last_stmt (this_block))
2017 if (is_gimple_call (g))
2019 tree callee_t = gimple_call_fndecl (g);
2020 if (callee_t
2021 && recursive_call_p (current_function_decl, callee_t)
2022 && maybe_clean_eh_stmt (g)
2023 && gimple_purge_dead_eh_edges (this_block))
2024 cfg_changed = true;
2027 if (dump_file)
2028 fprintf (dump_file, "Function found to be nothrow: %s\n",
2029 current_function_name ());
2030 return cfg_changed ? TODO_cleanup_cfg : 0;
2033 } // anon namespace
2035 gimple_opt_pass *
2036 make_pass_nothrow (gcc::context *ctxt)
2038 return new pass_nothrow (ctxt);