PR rtl-optimization/71673
[official-gcc.git] / gcc / ipa-pure-const.c
bloba9570e4aa6c56585da55714e426db52046f6dedd
1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "tree.h"
40 #include "gimple.h"
41 #include "tree-pass.h"
42 #include "tree-streamer.h"
43 #include "cgraph.h"
44 #include "diagnostic.h"
45 #include "calls.h"
46 #include "cfganal.h"
47 #include "tree-eh.h"
48 #include "gimple-iterator.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-ssa-loop-niter.h"
52 #include "langhooks.h"
53 #include "ipa-utils.h"
54 #include "gimple-pretty-print.h"
55 #include "cfgloop.h"
56 #include "tree-scalar-evolution.h"
57 #include "intl.h"
58 #include "opts.h"
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  The lattice order is IPA_CONST < IPA_PURE
   < IPA_NEITHER, so MAX () of two states is the more pessimistic one.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Human readable names of the lattice values, indexed by
   pure_const_state_e, used by the dump code.  */
const char *pure_const_names[3] = {"const", "pure", "neither"};
72 /* Holder for the const_state. There is one of these per function
73 decl. */
74 struct funct_state_d
76 /* See above. */
77 enum pure_const_state_e pure_const_state;
78 /* What user set here; we can be always sure about this. */
79 enum pure_const_state_e state_previously_known;
80 bool looping_previously_known;
82 /* True if the function could possibly infinite loop. There are a
83 lot of ways that this could be determined. We are pretty
84 conservative here. While it is possible to cse pure and const
85 calls, it is not legal to have dce get rid of the call if there
86 is a possibility that the call could infinite loop since this is
87 a behavioral change. */
88 bool looping;
90 bool can_throw;
92 /* If function can call free, munmap or otherwise make previously
93 non-trapping memory accesses trapping. */
94 bool can_free;
97 /* State used when we know nothing about function. */
98 static struct funct_state_d varying_state
99 = { IPA_NEITHER, IPA_NEITHER, true, true, true, true };
102 typedef struct funct_state_d * funct_state;
104 /* The storage of the funct_state is abstracted because there is the
105 possibility that it may be desirable to move this to the cgraph
106 local info. */
108 /* Array, indexed by cgraph node uid, of function states. */
110 static vec<funct_state> funct_state_vec;
112 static bool gate_pure_const (void);
114 namespace {
116 const pass_data pass_data_ipa_pure_const =
118 IPA_PASS, /* type */
119 "pure-const", /* name */
120 OPTGROUP_NONE, /* optinfo_flags */
121 TV_IPA_PURE_CONST, /* tv_id */
122 0, /* properties_required */
123 0, /* properties_provided */
124 0, /* properties_destroyed */
125 0, /* todo_flags_start */
126 0, /* todo_flags_finish */
129 class pass_ipa_pure_const : public ipa_opt_pass_d
131 public:
132 pass_ipa_pure_const(gcc::context *ctxt);
134 /* opt_pass methods: */
135 bool gate (function *) { return gate_pure_const (); }
136 unsigned int execute (function *fun);
138 void register_hooks (void);
140 private:
141 bool init_p;
143 /* Holders of ipa cgraph hooks: */
144 struct cgraph_node_hook_list *function_insertion_hook_holder;
145 struct cgraph_2node_hook_list *node_duplication_hook_holder;
146 struct cgraph_node_hook_list *node_removal_hook_holder;
148 }; // class pass_ipa_pure_const
150 } // anon namespace
152 /* Try to guess if function body will always be visible to compiler
153 when compiling the call and whether compiler will be able
154 to propagate the information by itself. */
156 static bool
157 function_always_visible_to_compiler_p (tree decl)
159 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
162 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
163 is true if the function is known to be finite. The diagnostic is
164 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
165 OPTION, this function may initialize it and it is always returned
166 by the function. */
168 static hash_set<tree> *
169 suggest_attribute (int option, tree decl, bool known_finite,
170 hash_set<tree> *warned_about,
171 const char * attrib_name)
173 if (!option_enabled (option, &global_options))
174 return warned_about;
175 if (TREE_THIS_VOLATILE (decl)
176 || (known_finite && function_always_visible_to_compiler_p (decl)))
177 return warned_about;
179 if (!warned_about)
180 warned_about = new hash_set<tree>;
181 if (warned_about->contains (decl))
182 return warned_about;
183 warned_about->add (decl);
184 warning_at (DECL_SOURCE_LOCATION (decl),
185 option,
186 known_finite
187 ? _("function might be candidate for attribute %<%s%>")
188 : _("function might be candidate for attribute %<%s%>"
189 " if it is known to return normally"), attrib_name);
190 return warned_about;
193 /* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
194 is true if the function is known to be finite. */
196 static void
197 warn_function_pure (tree decl, bool known_finite)
199 static hash_set<tree> *warned_about;
201 warned_about
202 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
203 known_finite, warned_about, "pure");
206 /* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
207 is true if the function is known to be finite. */
209 static void
210 warn_function_const (tree decl, bool known_finite)
212 static hash_set<tree> *warned_about;
213 warned_about
214 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
215 known_finite, warned_about, "const");
218 static void
219 warn_function_noreturn (tree decl)
221 static hash_set<tree> *warned_about;
222 if (!lang_hooks.missing_noreturn_ok_p (decl)
223 && targetm.warn_func_return (decl))
224 warned_about
225 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
226 true, warned_about, "noreturn");
229 /* Return true if we have a function state for NODE. */
231 static inline bool
232 has_function_state (struct cgraph_node *node)
234 if (!funct_state_vec.exists ()
235 || funct_state_vec.length () <= (unsigned int)node->uid)
236 return false;
237 return funct_state_vec[node->uid] != NULL;
240 /* Return the function state from NODE. */
242 static inline funct_state
243 get_function_state (struct cgraph_node *node)
245 if (!funct_state_vec.exists ()
246 || funct_state_vec.length () <= (unsigned int)node->uid
247 || !funct_state_vec[node->uid])
248 /* We might want to put correct previously_known state into varying. */
249 return &varying_state;
250 return funct_state_vec[node->uid];
253 /* Set the function state S for NODE. */
255 static inline void
256 set_function_state (struct cgraph_node *node, funct_state s)
258 if (!funct_state_vec.exists ()
259 || funct_state_vec.length () <= (unsigned int)node->uid)
260 funct_state_vec.safe_grow_cleared (node->uid + 1);
262 /* If funct_state_vec already contains a funct_state, we have to release
263 it before it's going to be ovewritten. */
264 if (funct_state_vec[node->uid] != NULL
265 && funct_state_vec[node->uid] != &varying_state)
266 free (funct_state_vec[node->uid]);
268 funct_state_vec[node->uid] = s;
271 /* Check to see if the use (or definition when CHECKING_WRITE is true)
272 variable T is legal in a function that is either pure or const. */
274 static inline void
275 check_decl (funct_state local,
276 tree t, bool checking_write, bool ipa)
278 /* Do not want to do anything with volatile except mark any
279 function that uses one to be not const or pure. */
280 if (TREE_THIS_VOLATILE (t))
282 local->pure_const_state = IPA_NEITHER;
283 if (dump_file)
284 fprintf (dump_file, " Volatile operand is not const/pure");
285 return;
288 /* Do not care about a local automatic that is not static. */
289 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
290 return;
292 /* If the variable has the "used" attribute, treat it as if it had a
293 been touched by the devil. */
294 if (DECL_PRESERVE_P (t))
296 local->pure_const_state = IPA_NEITHER;
297 if (dump_file)
298 fprintf (dump_file, " Used static/global variable is not const/pure\n");
299 return;
302 /* In IPA mode we are not interested in checking actual loads and stores;
303 they will be processed at propagation time using ipa_ref. */
304 if (ipa)
305 return;
307 /* Since we have dealt with the locals and params cases above, if we
308 are CHECKING_WRITE, this cannot be a pure or constant
309 function. */
310 if (checking_write)
312 local->pure_const_state = IPA_NEITHER;
313 if (dump_file)
314 fprintf (dump_file, " static/global memory write is not const/pure\n");
315 return;
318 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
320 /* Readonly reads are safe. */
321 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
322 return; /* Read of a constant, do not change the function state. */
323 else
325 if (dump_file)
326 fprintf (dump_file, " global memory read is not const\n");
327 /* Just a regular read. */
328 if (local->pure_const_state == IPA_CONST)
329 local->pure_const_state = IPA_PURE;
332 else
334 /* Compilation level statics can be read if they are readonly
335 variables. */
336 if (TREE_READONLY (t))
337 return;
339 if (dump_file)
340 fprintf (dump_file, " static memory read is not const\n");
341 /* Just a regular read. */
342 if (local->pure_const_state == IPA_CONST)
343 local->pure_const_state = IPA_PURE;
348 /* Check to see if the use (or definition when CHECKING_WRITE is true)
349 variable T is legal in a function that is either pure or const. */
351 static inline void
352 check_op (funct_state local, tree t, bool checking_write)
354 t = get_base_address (t);
355 if (t && TREE_THIS_VOLATILE (t))
357 local->pure_const_state = IPA_NEITHER;
358 if (dump_file)
359 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
360 return;
362 else if (t
363 && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
364 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
365 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
367 if (dump_file)
368 fprintf (dump_file, " Indirect ref to local memory is OK\n");
369 return;
371 else if (checking_write)
373 local->pure_const_state = IPA_NEITHER;
374 if (dump_file)
375 fprintf (dump_file, " Indirect ref write is not const/pure\n");
376 return;
378 else
380 if (dump_file)
381 fprintf (dump_file, " Indirect ref read is not const\n");
382 if (local->pure_const_state == IPA_CONST)
383 local->pure_const_state = IPA_PURE;
387 /* compute state based on ECF FLAGS and store to STATE and LOOPING. */
389 static void
390 state_from_flags (enum pure_const_state_e *state, bool *looping,
391 int flags, bool cannot_lead_to_return)
393 *looping = false;
394 if (flags & ECF_LOOPING_CONST_OR_PURE)
396 *looping = true;
397 if (dump_file && (dump_flags & TDF_DETAILS))
398 fprintf (dump_file, " looping");
400 if (flags & ECF_CONST)
402 *state = IPA_CONST;
403 if (dump_file && (dump_flags & TDF_DETAILS))
404 fprintf (dump_file, " const\n");
406 else if (flags & ECF_PURE)
408 *state = IPA_PURE;
409 if (dump_file && (dump_flags & TDF_DETAILS))
410 fprintf (dump_file, " pure\n");
412 else if (cannot_lead_to_return)
414 *state = IPA_PURE;
415 *looping = true;
416 if (dump_file && (dump_flags & TDF_DETAILS))
417 fprintf (dump_file, " ignoring side effects->pure looping\n");
419 else
421 if (dump_file && (dump_flags & TDF_DETAILS))
422 fprintf (dump_file, " neither\n");
423 *state = IPA_NEITHER;
424 *looping = true;
428 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
429 into STATE and LOOPING better of the two variants.
430 Be sure to merge looping correctly. IPA_NEITHER functions
431 have looping 0 even if they don't have to return. */
433 static inline void
434 better_state (enum pure_const_state_e *state, bool *looping,
435 enum pure_const_state_e state2, bool looping2)
437 if (state2 < *state)
439 if (*state == IPA_NEITHER)
440 *looping = looping2;
441 else
442 *looping = MIN (*looping, looping2);
443 *state = state2;
445 else if (state2 != IPA_NEITHER)
446 *looping = MIN (*looping, looping2);
449 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
450 into STATE and LOOPING worse of the two variants.
451 N is the actual node called. */
453 static inline void
454 worse_state (enum pure_const_state_e *state, bool *looping,
455 enum pure_const_state_e state2, bool looping2,
456 struct symtab_node *from,
457 struct symtab_node *to)
459 /* Consider function:
461 bool a(int *p)
463 return *p==*p;
466 During early optimization we will turn this into:
468 bool a(int *p)
470 return true;
473 Now if this function will be detected as CONST however when interposed it
474 may end up being just pure. We always must assume the worst scenario here.
476 if (*state == IPA_CONST && state2 == IPA_CONST
477 && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
479 if (dump_file && (dump_flags & TDF_DETAILS))
480 fprintf (dump_file, "Dropping state to PURE because call to %s may not "
481 "bind to current def.\n", to->name ());
482 state2 = IPA_PURE;
484 *state = MAX (*state, state2);
485 *looping = MAX (*looping, looping2);
488 /* Recognize special cases of builtins that are by themselves not pure or const
489 but function using them is. */
490 static bool
491 special_builtin_state (enum pure_const_state_e *state, bool *looping,
492 tree callee)
494 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
495 switch (DECL_FUNCTION_CODE (callee))
497 case BUILT_IN_RETURN:
498 case BUILT_IN_UNREACHABLE:
499 case BUILT_IN_ALLOCA:
500 case BUILT_IN_ALLOCA_WITH_ALIGN:
501 case BUILT_IN_STACK_SAVE:
502 case BUILT_IN_STACK_RESTORE:
503 case BUILT_IN_EH_POINTER:
504 case BUILT_IN_EH_FILTER:
505 case BUILT_IN_UNWIND_RESUME:
506 case BUILT_IN_CXA_END_CLEANUP:
507 case BUILT_IN_EH_COPY_VALUES:
508 case BUILT_IN_FRAME_ADDRESS:
509 case BUILT_IN_APPLY:
510 case BUILT_IN_APPLY_ARGS:
511 *looping = false;
512 *state = IPA_CONST;
513 return true;
514 case BUILT_IN_PREFETCH:
515 *looping = true;
516 *state = IPA_CONST;
517 return true;
518 default:
519 break;
521 return false;
524 /* Check the parameters of a function call to CALL_EXPR to see if
525 there are any references in the parameters that are not allowed for
526 pure or const functions. Also check to see if this is either an
527 indirect call, a call outside the compilation unit, or has special
528 attributes that may also effect the purity. The CALL_EXPR node for
529 the entire call expression. */
531 static void
532 check_call (funct_state local, gcall *call, bool ipa)
534 int flags = gimple_call_flags (call);
535 tree callee_t = gimple_call_fndecl (call);
536 bool possibly_throws = stmt_could_throw_p (call);
537 bool possibly_throws_externally = (possibly_throws
538 && stmt_can_throw_external (call));
540 if (possibly_throws)
542 unsigned int i;
543 for (i = 0; i < gimple_num_ops (call); i++)
544 if (gimple_op (call, i)
545 && tree_could_throw_p (gimple_op (call, i)))
547 if (possibly_throws && cfun->can_throw_non_call_exceptions)
549 if (dump_file)
550 fprintf (dump_file, " operand can throw; looping\n");
551 local->looping = true;
553 if (possibly_throws_externally)
555 if (dump_file)
556 fprintf (dump_file, " operand can throw externally\n");
557 local->can_throw = true;
562 /* The const and pure flags are set by a variety of places in the
563 compiler (including here). If someone has already set the flags
564 for the callee, (such as for some of the builtins) we will use
565 them, otherwise we will compute our own information.
567 Const and pure functions have less clobber effects than other
568 functions so we process these first. Otherwise if it is a call
569 outside the compilation unit or an indirect call we punt. This
570 leaves local calls which will be processed by following the call
571 graph. */
572 if (callee_t)
574 enum pure_const_state_e call_state;
575 bool call_looping;
577 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
578 && !nonfreeing_call_p (call))
579 local->can_free = true;
581 if (special_builtin_state (&call_state, &call_looping, callee_t))
583 worse_state (&local->pure_const_state, &local->looping,
584 call_state, call_looping,
585 NULL, NULL);
586 return;
588 /* When bad things happen to bad functions, they cannot be const
589 or pure. */
590 if (setjmp_call_p (callee_t))
592 if (dump_file)
593 fprintf (dump_file, " setjmp is not const/pure\n");
594 local->looping = true;
595 local->pure_const_state = IPA_NEITHER;
598 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
599 switch (DECL_FUNCTION_CODE (callee_t))
601 case BUILT_IN_LONGJMP:
602 case BUILT_IN_NONLOCAL_GOTO:
603 if (dump_file)
604 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
605 local->pure_const_state = IPA_NEITHER;
606 local->looping = true;
607 break;
608 default:
609 break;
612 else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
613 local->can_free = true;
615 /* When not in IPA mode, we can still handle self recursion. */
616 if (!ipa && callee_t
617 && recursive_call_p (current_function_decl, callee_t))
619 if (dump_file)
620 fprintf (dump_file, " Recursive call can loop.\n");
621 local->looping = true;
623 /* Either callee is unknown or we are doing local analysis.
624 Look to see if there are any bits available for the callee (such as by
625 declaration or because it is builtin) and process solely on the basis of
626 those bits. Handle internal calls always, those calls don't have
627 corresponding cgraph edges and thus aren't processed during
628 the propagation. */
629 else if (!ipa || gimple_call_internal_p (call))
631 enum pure_const_state_e call_state;
632 bool call_looping;
633 if (possibly_throws && cfun->can_throw_non_call_exceptions)
635 if (dump_file)
636 fprintf (dump_file, " can throw; looping\n");
637 local->looping = true;
639 if (possibly_throws_externally)
641 if (dump_file)
643 fprintf (dump_file, " can throw externally to lp %i\n",
644 lookup_stmt_eh_lp (call));
645 if (callee_t)
646 fprintf (dump_file, " callee:%s\n",
647 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
649 local->can_throw = true;
651 if (dump_file && (dump_flags & TDF_DETAILS))
652 fprintf (dump_file, " checking flags for call:");
653 state_from_flags (&call_state, &call_looping, flags,
654 ((flags & (ECF_NORETURN | ECF_NOTHROW))
655 == (ECF_NORETURN | ECF_NOTHROW))
656 || (!flag_exceptions && (flags & ECF_NORETURN)));
657 worse_state (&local->pure_const_state, &local->looping,
658 call_state, call_looping, NULL, NULL);
660 /* Direct functions calls are handled by IPA propagation. */
663 /* Wrapper around check_decl for loads in local more. */
665 static bool
666 check_load (gimple *, tree op, tree, void *data)
668 if (DECL_P (op))
669 check_decl ((funct_state)data, op, false, false);
670 else
671 check_op ((funct_state)data, op, false);
672 return false;
675 /* Wrapper around check_decl for stores in local more. */
677 static bool
678 check_store (gimple *, tree op, tree, void *data)
680 if (DECL_P (op))
681 check_decl ((funct_state)data, op, true, false);
682 else
683 check_op ((funct_state)data, op, true);
684 return false;
687 /* Wrapper around check_decl for loads in ipa mode. */
689 static bool
690 check_ipa_load (gimple *, tree op, tree, void *data)
692 if (DECL_P (op))
693 check_decl ((funct_state)data, op, false, true);
694 else
695 check_op ((funct_state)data, op, false);
696 return false;
699 /* Wrapper around check_decl for stores in ipa mode. */
701 static bool
702 check_ipa_store (gimple *, tree op, tree, void *data)
704 if (DECL_P (op))
705 check_decl ((funct_state)data, op, true, true);
706 else
707 check_op ((funct_state)data, op, true);
708 return false;
711 /* Look into pointer pointed to by GSIP and figure out what interesting side
712 effects it has. */
713 static void
714 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
716 gimple *stmt = gsi_stmt (*gsip);
718 if (is_gimple_debug (stmt))
719 return;
721 /* Do consider clobber as side effects before IPA, so we rather inline
722 C++ destructors and keep clobber semantics than eliminate them.
724 TODO: We may get smarter during early optimizations on these and let
725 functions containing only clobbers to be optimized more. This is a common
726 case of C++ destructors. */
728 if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
729 return;
731 if (dump_file)
733 fprintf (dump_file, " scanning: ");
734 print_gimple_stmt (dump_file, stmt, 0, 0);
737 if (gimple_has_volatile_ops (stmt)
738 && !gimple_clobber_p (stmt))
740 local->pure_const_state = IPA_NEITHER;
741 if (dump_file)
742 fprintf (dump_file, " Volatile stmt is not const/pure\n");
745 /* Look for loads and stores. */
746 walk_stmt_load_store_ops (stmt, local,
747 ipa ? check_ipa_load : check_load,
748 ipa ? check_ipa_store : check_store);
750 if (gimple_code (stmt) != GIMPLE_CALL
751 && stmt_could_throw_p (stmt))
753 if (cfun->can_throw_non_call_exceptions)
755 if (dump_file)
756 fprintf (dump_file, " can throw; looping\n");
757 local->looping = true;
759 if (stmt_can_throw_external (stmt))
761 if (dump_file)
762 fprintf (dump_file, " can throw externally\n");
763 local->can_throw = true;
765 else
766 if (dump_file)
767 fprintf (dump_file, " can throw\n");
769 switch (gimple_code (stmt))
771 case GIMPLE_CALL:
772 check_call (local, as_a <gcall *> (stmt), ipa);
773 break;
774 case GIMPLE_LABEL:
775 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
776 /* Target of long jump. */
778 if (dump_file)
779 fprintf (dump_file, " nonlocal label is not const/pure\n");
780 local->pure_const_state = IPA_NEITHER;
782 break;
783 case GIMPLE_ASM:
784 if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
786 if (dump_file)
787 fprintf (dump_file, " memory asm clobber is not const/pure\n");
788 /* Abandon all hope, ye who enter here. */
789 local->pure_const_state = IPA_NEITHER;
790 local->can_free = true;
792 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
794 if (dump_file)
795 fprintf (dump_file, " volatile is not const/pure\n");
796 /* Abandon all hope, ye who enter here. */
797 local->pure_const_state = IPA_NEITHER;
798 local->looping = true;
799 local->can_free = true;
801 return;
802 default:
803 break;
808 /* This is the main routine for finding the reference patterns for
809 global variables within a function FN. */
811 static funct_state
812 analyze_function (struct cgraph_node *fn, bool ipa)
814 tree decl = fn->decl;
815 funct_state l;
816 basic_block this_block;
818 l = XCNEW (struct funct_state_d);
819 l->pure_const_state = IPA_CONST;
820 l->state_previously_known = IPA_NEITHER;
821 l->looping_previously_known = true;
822 l->looping = false;
823 l->can_throw = false;
824 l->can_free = false;
825 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
826 flags_from_decl_or_type (fn->decl),
827 fn->cannot_return_p ());
829 if (fn->thunk.thunk_p || fn->alias)
831 /* Thunk gets propagated through, so nothing interesting happens. */
832 gcc_assert (ipa);
833 if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
834 l->pure_const_state = IPA_NEITHER;
835 return l;
838 if (dump_file)
840 fprintf (dump_file, "\n\n local analysis of %s\n ",
841 fn->name ());
844 push_cfun (DECL_STRUCT_FUNCTION (decl));
846 FOR_EACH_BB_FN (this_block, cfun)
848 gimple_stmt_iterator gsi;
849 struct walk_stmt_info wi;
851 memset (&wi, 0, sizeof (wi));
852 for (gsi = gsi_start_bb (this_block);
853 !gsi_end_p (gsi);
854 gsi_next (&gsi))
856 check_stmt (&gsi, l, ipa);
857 if (l->pure_const_state == IPA_NEITHER
858 && l->looping
859 && l->can_throw
860 && l->can_free)
861 goto end;
865 end:
866 if (l->pure_const_state != IPA_NEITHER)
868 /* Const functions cannot have back edges (an
869 indication of possible infinite loop side
870 effect. */
871 if (mark_dfs_back_edges ())
873 /* Preheaders are needed for SCEV to work.
874 Simple latches and recorded exits improve chances that loop will
875 proved to be finite in testcases such as in loop-15.c
876 and loop-24.c */
877 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
878 | LOOPS_HAVE_SIMPLE_LATCHES
879 | LOOPS_HAVE_RECORDED_EXITS);
880 if (dump_file && (dump_flags & TDF_DETAILS))
881 flow_loops_dump (dump_file, NULL, 0);
882 if (mark_irreducible_loops ())
884 if (dump_file)
885 fprintf (dump_file, " has irreducible loops\n");
886 l->looping = true;
888 else
890 struct loop *loop;
891 scev_initialize ();
892 FOR_EACH_LOOP (loop, 0)
893 if (!finite_loop_p (loop))
895 if (dump_file)
896 fprintf (dump_file, " can not prove finiteness of "
897 "loop %i\n", loop->num);
898 l->looping =true;
899 break;
901 scev_finalize ();
903 loop_optimizer_finalize ();
907 if (dump_file && (dump_flags & TDF_DETAILS))
908 fprintf (dump_file, " checking previously known:");
910 better_state (&l->pure_const_state, &l->looping,
911 l->state_previously_known,
912 l->looping_previously_known);
913 if (TREE_NOTHROW (decl))
914 l->can_throw = false;
916 pop_cfun ();
917 if (dump_file)
919 if (l->looping)
920 fprintf (dump_file, "Function is locally looping.\n");
921 if (l->can_throw)
922 fprintf (dump_file, "Function is locally throwing.\n");
923 if (l->pure_const_state == IPA_CONST)
924 fprintf (dump_file, "Function is locally const.\n");
925 if (l->pure_const_state == IPA_PURE)
926 fprintf (dump_file, "Function is locally pure.\n");
927 if (l->can_free)
928 fprintf (dump_file, "Function can locally free.\n");
930 return l;
933 /* Called when new function is inserted to callgraph late. */
934 static void
935 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
937 /* There are some shared nodes, in particular the initializers on
938 static declarations. We do not need to scan them more than once
939 since all we would be interested in are the addressof
940 operations. */
941 if (opt_for_fn (node->decl, flag_ipa_pure_const))
942 set_function_state (node, analyze_function (node, true));
945 /* Called when new clone is inserted to callgraph late. */
947 static void
948 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
949 void *data ATTRIBUTE_UNUSED)
951 if (has_function_state (src))
953 funct_state l = XNEW (struct funct_state_d);
954 gcc_assert (!has_function_state (dst));
955 memcpy (l, get_function_state (src), sizeof (*l));
956 set_function_state (dst, l);
960 /* Called when new clone is inserted to callgraph late. */
962 static void
963 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
965 if (has_function_state (node))
966 set_function_state (node, NULL);
970 void
971 pass_ipa_pure_const::
972 register_hooks (void)
974 if (init_p)
975 return;
977 init_p = true;
979 node_removal_hook_holder =
980 symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
981 node_duplication_hook_holder =
982 symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
983 function_insertion_hook_holder =
984 symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
988 /* Analyze each function in the cgraph to see if it is locally PURE or
989 CONST. */
991 static void
992 pure_const_generate_summary (void)
994 struct cgraph_node *node;
996 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
997 pass->register_hooks ();
999 /* Process all of the functions.
1001 We process AVAIL_INTERPOSABLE functions. We can not use the results
1002 by default, but the info can be used at LTO with -fwhole-program or
1003 when function got cloned and the clone is AVAILABLE. */
1005 FOR_EACH_DEFINED_FUNCTION (node)
1006 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1007 set_function_state (node, analyze_function (node, true));
1011 /* Serialize the ipa info for lto. */
1013 static void
1014 pure_const_write_summary (void)
1016 struct cgraph_node *node;
1017 struct lto_simple_output_block *ob
1018 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1019 unsigned int count = 0;
1020 lto_symtab_encoder_iterator lsei;
1021 lto_symtab_encoder_t encoder;
1023 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1025 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1026 lsei_next_function_in_partition (&lsei))
1028 node = lsei_cgraph_node (lsei);
1029 if (node->definition && has_function_state (node))
1030 count++;
1033 streamer_write_uhwi_stream (ob->main_stream, count);
1035 /* Process all of the functions. */
1036 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1037 lsei_next_function_in_partition (&lsei))
1039 node = lsei_cgraph_node (lsei);
1040 if (node->definition && has_function_state (node))
1042 struct bitpack_d bp;
1043 funct_state fs;
1044 int node_ref;
1045 lto_symtab_encoder_t encoder;
1047 fs = get_function_state (node);
1049 encoder = ob->decl_state->symtab_node_encoder;
1050 node_ref = lto_symtab_encoder_encode (encoder, node);
1051 streamer_write_uhwi_stream (ob->main_stream, node_ref);
1053 /* Note that flags will need to be read in the opposite
1054 order as we are pushing the bitflags into FLAGS. */
1055 bp = bitpack_create (ob->main_stream);
1056 bp_pack_value (&bp, fs->pure_const_state, 2);
1057 bp_pack_value (&bp, fs->state_previously_known, 2);
1058 bp_pack_value (&bp, fs->looping_previously_known, 1);
1059 bp_pack_value (&bp, fs->looping, 1);
1060 bp_pack_value (&bp, fs->can_throw, 1);
1061 bp_pack_value (&bp, fs->can_free, 1);
1062 streamer_write_bitpack (&bp);
1066 lto_destroy_simple_output_block (ob);
1070 /* Deserialize the ipa info for lto. */
/* Deserialize the ipa pure/const info for LTO: walk every LTO file's
   IPA pure/const summary section, decode each recorded cgraph node and
   attach a freshly XCNEW-allocated funct_state holding the flags that
   pure_const_write_summary packed.  The bitpack fields are unpacked in
   exactly the order in which they were packed.  */
1072 static void
1073 pure_const_read_summary (void)
1075 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1076 struct lto_file_decl_data *file_data;
1077 unsigned int j = 0;
/* Make sure the summary-maintenance callgraph hooks are installed before
   any state is attached to nodes.  */
1079 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1080 pass->register_hooks ();
/* Iterate over the summary section of every LTO input file.  */
1082 while ((file_data = file_data_vec[j++]))
1084 const char *data;
1085 size_t len;
1086 struct lto_input_block *ib
1087 = lto_create_simple_input_block (file_data,
1088 LTO_section_ipa_pure_const,
1089 &data, &len);
/* A file may legitimately lack the section; skip it then.  */
1090 if (ib)
1092 unsigned int i;
1093 unsigned int count = streamer_read_uhwi (ib);
1095 for (i = 0; i < count; i++)
1097 unsigned int index;
1098 struct cgraph_node *node;
1099 struct bitpack_d bp;
1100 funct_state fs;
1101 lto_symtab_encoder_t encoder;
1103 fs = XCNEW (struct funct_state_d);
1104 index = streamer_read_uhwi (ib);
1105 encoder = file_data->symtab_node_encoder;
1106 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1107 index));
1108 set_function_state (node, fs);
1110 /* Note that the flags must be read in the opposite
1111 order in which they were written (the bitflags were
1112 pushed into FLAGS). */
1113 bp = streamer_read_bitpack (ib);
1114 fs->pure_const_state
1115 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1116 fs->state_previously_known
1117 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1118 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1119 fs->looping = bp_unpack_value (&bp, 1);
1120 fs->can_throw = bp_unpack_value (&bp, 1);
1121 fs->can_free = bp_unpack_value (&bp, 1);
/* Dump both the streamed-in state and the flags already present on the
   decl so mismatches are visible in the dump file.  */
1122 if (dump_file)
1124 int flags = flags_from_decl_or_type (node->decl);
1125 fprintf (dump_file, "Read info for %s/%i ",
1126 node->name (),
1127 node->order);
1128 if (flags & ECF_CONST)
1129 fprintf (dump_file, " const");
1130 if (flags & ECF_PURE)
1131 fprintf (dump_file, " pure");
1132 if (flags & ECF_NOTHROW)
1133 fprintf (dump_file, " nothrow");
1134 fprintf (dump_file, "\n pure const state: %s\n",
1135 pure_const_names[fs->pure_const_state]);
1136 fprintf (dump_file, " previously known state: %s\n",
1137 pure_const_names[fs->state_previously_known]);
1138 if (fs->looping)
1139 fprintf (dump_file," function is locally looping\n");
1140 if (fs->looping_previously_known)
1141 fprintf (dump_file," function is previously known looping\n");
1142 if (fs->can_throw)
1143 fprintf (dump_file," function is locally throwing\n");
1144 if (fs->can_free)
1145 fprintf (dump_file," function can locally free\n");
1149 lto_destroy_simple_input_block (file_data,
1150 LTO_section_ipa_pure_const,
1151 ib, data, len);
1156 /* We only propagate across edges that can throw externally and their callee
1157 is not interposable. */
1159 static bool
1160 ignore_edge_for_nothrow (struct cgraph_edge *e)
1162 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1163 return true;
1165 enum availability avail;
1166 cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
1167 e->caller);
1168 if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
1169 return true;
1170 return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1171 && !e->callee->binds_to_current_def_p (e->caller);
1174 /* Return true if NODE is self recursive function.
1175 Indirectly recursive functions appears as non-trivial strongly
1176 connected components, so we need to care about self recursion
1177 only. */
1179 static bool
1180 self_recursive_p (struct cgraph_node *node)
1182 struct cgraph_edge *e;
1183 for (e = node->callees; e; e = e->next_callee)
1184 if (e->callee->function_symbol () == node)
1185 return true;
1186 return false;
1189 /* Return true if N is cdtor that is not const or pure. In this case we may
1190 need to remove unreachable function if it is marked const/pure. */
1192 static bool
1193 cdtor_p (cgraph_node *n, void *)
1195 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1196 return !TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl);
1197 return false;
1200 /* We only propagate across edges with non-interposable callee. */
1202 static bool
1203 ignore_edge_for_pure_const (struct cgraph_edge *e)
1205 enum availability avail;
1206 e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1207 return (avail <= AVAIL_INTERPOSABLE);
1211 /* Produce transitive closure over the callgraph and compute pure/const
1212 attributes. */
/* Fixpoint over the strongly connected components of the reduced
   callgraph.  For each SCC the worst pure/const/looping state of any
   member, callee edge, indirect call and variable reference is
   computed, then copied back to every member.  Returns true when a
   static cdtor was promoted to const/pure (see cdtor_p), in which case
   the caller must schedule unreachable-function removal.  */
1214 static bool
1215 propagate_pure_const (void)
1217 struct cgraph_node *node;
1218 struct cgraph_node *w;
1219 struct cgraph_node **order =
1220 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1221 int order_pos;
1222 int i;
1223 struct ipa_dfs_info * w_info;
1224 bool remove_p = false;
1225 bool has_cdtor;
/* Topologically order the SCCs, ignoring edges to interposable callees
   (ignore_edge_for_pure_const), so callees are processed first.  */
1227 order_pos = ipa_reduced_postorder (order, true, false,
1228 ignore_edge_for_pure_const);
1229 if (dump_file)
1231 cgraph_node::dump_cgraph (dump_file);
1232 ipa_print_order (dump_file, "reduced", order, order_pos);
1235 /* Propagate the local information through the call graph to produce
1236 the global information. All the nodes within a cycle will have
1237 the same info so we collapse cycles first. Then we can do the
1238 propagation in one pass from the leaves to the roots. */
1239 for (i = 0; i < order_pos; i++ )
1241 enum pure_const_state_e pure_const_state = IPA_CONST;
1242 bool looping = false;
1243 int count = 0;
1244 node = order[i];
/* Aliases share state with their target; skip them.  */
1246 if (node->alias)
1247 continue;
1249 if (dump_file && (dump_flags & TDF_DETAILS))
1250 fprintf (dump_file, "Starting cycle\n");
1252 /* Find the worst state for any node in the cycle. */
1253 w = node;
/* Walk the ->next_cycle chain built by ipa_reduced_postorder; stop
   early once the state degrades to IPA_NEITHER since it cannot get
   any worse.  */
1254 while (w && pure_const_state != IPA_NEITHER)
1256 struct cgraph_edge *e;
1257 struct cgraph_edge *ie;
1258 int i;
1259 struct ipa_ref *ref = NULL;
1261 funct_state w_l = get_function_state (w);
1262 if (dump_file && (dump_flags & TDF_DETAILS))
1263 fprintf (dump_file, " Visiting %s/%i state:%s looping %i\n",
1264 w->name (),
1265 w->order,
1266 pure_const_names[w_l->pure_const_state],
1267 w_l->looping);
1269 /* First merge in function body properties.
1270 We are safe to pass NULL as FROM and TO because we will take care
1271 of possible interposition when walking callees. */
1272 worse_state (&pure_const_state, &looping,
1273 w_l->pure_const_state, w_l->looping,
1274 NULL, NULL);
1275 if (pure_const_state == IPA_NEITHER)
1276 break;
1278 count++;
1280 /* We consider recursive cycles as possibly infinite.
1281 This might be relaxed since infinite recursion leads to stack
1282 overflow. */
1283 if (count > 1)
1284 looping = true;
1286 /* Now walk the edges and merge in callee properties. */
1287 for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1288 e = e->next_callee)
1290 enum availability avail;
1291 struct cgraph_node *y = e->callee->
1292 function_or_virtual_thunk_symbol (&avail,
1293 e->caller);
1294 enum pure_const_state_e edge_state = IPA_CONST;
1295 bool edge_looping = false;
1297 if (dump_file && (dump_flags & TDF_DETAILS))
1299 fprintf (dump_file,
1300 " Call to %s/%i",
1301 e->callee->name (),
1302 e->callee->order);
/* For a non-interposable callee use its analyzed summary; otherwise
   fall back to known builtin semantics or the decl/type flags.  */
1304 if (avail > AVAIL_INTERPOSABLE)
1306 funct_state y_l = get_function_state (y);
1307 if (dump_file && (dump_flags & TDF_DETAILS))
1309 fprintf (dump_file,
1310 " state:%s looping:%i\n",
1311 pure_const_names[y_l->pure_const_state],
1312 y_l->looping);
1314 if (y_l->pure_const_state > IPA_PURE
1315 && e->cannot_lead_to_return_p ())
1317 if (dump_file && (dump_flags & TDF_DETAILS))
1318 fprintf (dump_file,
1319 " Ignoring side effects"
1320 " -> pure, looping\n");
1321 edge_state = IPA_PURE;
1322 edge_looping = true;
1324 else
1326 edge_state = y_l->pure_const_state;
1327 edge_looping = y_l->looping;
1330 else if (special_builtin_state (&edge_state, &edge_looping,
1331 y->decl))
1333 else
1334 state_from_flags (&edge_state, &edge_looping,
1335 flags_from_decl_or_type (y->decl),
1336 e->cannot_lead_to_return_p ());
1338 /* Merge the results with what we already know. */
1339 better_state (&edge_state, &edge_looping,
1340 w_l->state_previously_known,
1341 w_l->looping_previously_known);
1342 worse_state (&pure_const_state, &looping,
1343 edge_state, edge_looping, e->caller, e->callee);
1344 if (pure_const_state == IPA_NEITHER)
1345 break;
1348 /* Now process the indirect call. */
1349 for (ie = w->indirect_calls;
1350 ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1352 enum pure_const_state_e edge_state = IPA_CONST;
1353 bool edge_looping = false;
1355 if (dump_file && (dump_flags & TDF_DETAILS))
1356 fprintf (dump_file, " Indirect call");
/* Only the ECF flags recorded on the indirect-call site are known.  */
1357 state_from_flags (&edge_state, &edge_looping,
1358 ie->indirect_info->ecf_flags,
1359 ie->cannot_lead_to_return_p ());
1360 /* Merge the results with what we already know. */
1361 better_state (&edge_state, &edge_looping,
1362 w_l->state_previously_known,
1363 w_l->looping_previously_known);
1364 worse_state (&pure_const_state, &looping,
1365 edge_state, edge_looping, NULL, NULL);
1366 if (pure_const_state == IPA_NEITHER)
1367 break;
1370 /* And finally all loads and stores. */
1371 for (i = 0; w->iterate_reference (i, ref)
1372 && pure_const_state != IPA_NEITHER; i++)
1374 enum pure_const_state_e ref_state = IPA_CONST;
1375 bool ref_looping = false;
1376 switch (ref->use)
1378 case IPA_REF_LOAD:
1379 /* readonly reads are safe. */
1380 if (TREE_READONLY (ref->referred->decl))
1381 break;
1382 if (dump_file && (dump_flags & TDF_DETAILS))
1383 fprintf (dump_file, " nonreadonly global var read\n");
1384 ref_state = IPA_PURE;
1385 break;
1386 case IPA_REF_STORE:
1387 if (ref->cannot_lead_to_return ())
1388 break;
1389 ref_state = IPA_NEITHER;
1390 if (dump_file && (dump_flags & TDF_DETAILS))
1391 fprintf (dump_file, " global var write\n");
1392 break;
1393 case IPA_REF_ADDR:
1394 case IPA_REF_CHKP:
1395 break;
1396 default:
1397 gcc_unreachable ();
1399 better_state (&ref_state, &ref_looping,
1400 w_l->state_previously_known,
1401 w_l->looping_previously_known);
1402 worse_state (&pure_const_state, &looping,
1403 ref_state, ref_looping, NULL, NULL);
1404 if (pure_const_state == IPA_NEITHER)
1405 break;
1407 w_info = (struct ipa_dfs_info *) w->aux;
1408 w = w_info->next_cycle;
1410 if (dump_file && (dump_flags & TDF_DETAILS))
1411 fprintf (dump_file, "Result %s looping %i\n",
1412 pure_const_names [pure_const_state],
1413 looping);
1415 /* Find the worst state of can_free for any node in the cycle. */
1416 bool can_free = false;
1417 w = node;
1418 while (w && !can_free)
1420 struct cgraph_edge *e;
1421 funct_state w_l = get_function_state (w);
/* Interposable bodies and indirect calls may hide a call to free.  */
1423 if (w_l->can_free
1424 || w->get_availability () == AVAIL_INTERPOSABLE
1425 || w->indirect_calls)
1426 can_free = true;
1428 for (e = w->callees; e && !can_free; e = e->next_callee)
1430 enum availability avail;
1431 struct cgraph_node *y = e->callee->
1432 function_or_virtual_thunk_symbol (&avail,
1433 e->caller);
1435 if (avail > AVAIL_INTERPOSABLE)
1436 can_free = get_function_state (y)->can_free;
1437 else
1438 can_free = true;
1440 w_info = (struct ipa_dfs_info *) w->aux;
1441 w = w_info->next_cycle;
1444 /* Copy back the region's pure_const_state which is shared by
1445 all nodes in the region. */
1446 w = node;
1447 while (w)
1449 funct_state w_l = get_function_state (w);
1450 enum pure_const_state_e this_state = pure_const_state;
1451 bool this_looping = looping;
1453 w_l->can_free = can_free;
1454 w->nonfreeing_fn = !can_free;
1455 if (!can_free && dump_file)
1456 fprintf (dump_file, "Function found not to call free: %s\n",
1457 w->name ());
/* A previously-known (e.g. user-declared) state may sharpen the
   propagated one; the propagated state never overrides it.  */
1459 if (w_l->state_previously_known != IPA_NEITHER
1460 && this_state > w_l->state_previously_known)
1462 this_state = w_l->state_previously_known;
1463 if (this_state == IPA_NEITHER)
1464 this_looping = w_l->looping_previously_known;
1466 if (!this_looping && self_recursive_p (w))
1467 this_looping = true;
1468 if (!w_l->looping_previously_known)
1469 this_looping = false;
1471 /* All nodes within a cycle share the same info. */
1472 w_l->pure_const_state = this_state;
1473 w_l->looping = this_looping;
1475 /* Inline clones share declaration with their offline copies;
1476 do not modify their declarations since the offline copy may
1477 be different. */
1478 if (!w->global.inlined_to)
1479 switch (this_state)
1481 case IPA_CONST:
1482 if (!TREE_READONLY (w->decl))
1484 warn_function_const (w->decl, !this_looping);
1485 if (dump_file)
1486 fprintf (dump_file, "Function found to be %sconst: %s\n",
1487 this_looping ? "looping " : "",
1488 w->name ());
1490 /* Turning constructor or destructor to non-looping const/pure
1491 enables us to possibly remove the function completely. */
1492 if (this_looping)
1493 has_cdtor = false;
1494 else
1495 has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
1496 NULL, true);
1497 if (w->set_const_flag (true, this_looping))
1499 if (dump_file)
1500 fprintf (dump_file,
1501 "Declaration updated to be %sconst: %s\n",
1502 this_looping ? "looping " : "",
1503 w->name ());
1504 remove_p |= has_cdtor;
1506 break;
1508 case IPA_PURE:
1509 if (!DECL_PURE_P (w->decl))
1511 warn_function_pure (w->decl, !this_looping);
1512 if (dump_file)
1513 fprintf (dump_file, "Function found to be %spure: %s\n",
1514 this_looping ? "looping " : "",
1515 w->name ());
1517 if (this_looping)
1518 has_cdtor = false;
1519 else
1520 has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
1521 NULL, true);
1522 if (w->set_pure_flag (true, this_looping))
1524 if (dump_file)
1525 fprintf (dump_file,
1526 "Declaration updated to be %spure: %s\n",
1527 this_looping ? "looping " : "",
1528 w->name ());
1529 remove_p |= has_cdtor;
1531 break;
1533 default:
1534 break;
1536 w_info = (struct ipa_dfs_info *) w->aux;
1537 w = w_info->next_cycle;
1541 ipa_free_postorder_info ();
1542 free (order);
1543 return remove_p;
1546 /* Produce transitive closure over the callgraph and compute nothrow
1547 attributes. */
/* Transitive nothrow propagation over the reduced callgraph: for each
   SCC decide whether any member can throw (locally, via a callee, or
   via an indirect call), then either set TREE_NOTHROW on every member
   or record can_throw in its funct_state.  */
1549 static void
1550 propagate_nothrow (void)
1552 struct cgraph_node *node;
1553 struct cgraph_node *w;
1554 struct cgraph_node **order =
1555 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1556 int order_pos;
1557 int i;
1558 struct ipa_dfs_info * w_info;
/* Build SCC order ignoring edges that cannot affect nothrow
   (ignore_edge_for_nothrow).  */
1560 order_pos = ipa_reduced_postorder (order, true, false,
1561 ignore_edge_for_nothrow);
1562 if (dump_file)
1564 cgraph_node::dump_cgraph (dump_file);
1565 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1568 /* Propagate the local information through the call graph to produce
1569 the global information. All the nodes within a cycle will have
1570 the same info so we collapse cycles first. Then we can do the
1571 propagation in one pass from the leaves to the roots. */
1572 for (i = 0; i < order_pos; i++ )
1574 bool can_throw = false;
1575 node = order[i];
1577 if (node->alias)
1578 continue;
1580 /* Find the worst state for any node in the cycle. */
1581 w = node;
1582 while (w && !can_throw)
1584 struct cgraph_edge *e, *ie;
/* A decl already marked TREE_NOTHROW needs no further analysis.  */
1586 if (!TREE_NOTHROW (w->decl))
1588 funct_state w_l = get_function_state (w);
1590 if (w_l->can_throw
1591 || w->get_availability () == AVAIL_INTERPOSABLE)
1592 can_throw = true;
1594 for (e = w->callees; e && !can_throw; e = e->next_callee)
1596 enum availability avail;
1598 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1599 continue;
1601 struct cgraph_node *y = e->callee->
1602 function_or_virtual_thunk_symbol (&avail,
1603 e->caller);
1605 /* We can use info about the callee only if we know it can
1606 not be interposed.
1607 When callee is compiled with non-call exceptions we also
1608 must check that the declaration is bound to current
1609 body as other semantically equivalent body may still
1610 throw. */
1611 if (avail <= AVAIL_INTERPOSABLE
1612 || (!TREE_NOTHROW (y->decl)
1613 && (get_function_state (y)->can_throw
1614 || (opt_for_fn (y->decl, flag_non_call_exceptions)
1615 && !e->callee->binds_to_current_def_p (w)))))
1616 can_throw = true;
/* Indirect calls are assumed throwing unless the call-site ECF
   flags say otherwise.  */
1618 for (ie = w->indirect_calls; ie && !can_throw;
1619 ie = ie->next_callee)
1620 if (ie->can_throw_external
1621 && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1622 can_throw = true;
1624 w_info = (struct ipa_dfs_info *) w->aux;
1625 w = w_info->next_cycle;
1628 /* Copy back the region's pure_const_state which is shared by
1629 all nodes in the region. */
1630 w = node;
1631 while (w)
1633 funct_state w_l = get_function_state (w);
1634 if (!can_throw && !TREE_NOTHROW (w->decl))
1636 /* Inline clones share declaration with their offline copies;
1637 do not modify their declarations since the offline copy may
1638 be different. */
1639 if (!w->global.inlined_to)
1641 w->set_nothrow_flag (true);
1642 if (dump_file)
1643 fprintf (dump_file, "Function found to be nothrow: %s\n",
1644 w->name ());
1647 else if (can_throw && !TREE_NOTHROW (w->decl))
1648 w_l->can_throw = true;
1649 w_info = (struct ipa_dfs_info *) w->aux;
1650 w = w_info->next_cycle;
1654 ipa_free_postorder_info ();
1655 free (order);
1659 /* Produce the global information by performing a transitive closure
1660 over the local information that was produced by generate_summary. */
1662 unsigned int
1663 pass_ipa_pure_const::
1664 execute (function *)
1666 struct cgraph_node *node;
1667 bool remove_p;
1669 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
1670 symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
1671 symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
1673 /* Nothrow makes more function to not lead to return and improve
1674 later analysis. */
1675 propagate_nothrow ();
1676 remove_p = propagate_pure_const ();
1678 /* Cleanup. */
1679 FOR_EACH_FUNCTION (node)
1680 if (has_function_state (node))
1681 free (get_function_state (node));
1682 funct_state_vec.release ();
1683 return remove_p ? TODO_remove_functions : 0;
1686 static bool
1687 gate_pure_const (void)
1689 return flag_ipa_pure_const || in_lto_p;
1692 pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
1693 : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
1694 pure_const_generate_summary, /* generate_summary */
1695 pure_const_write_summary, /* write_summary */
1696 pure_const_read_summary, /* read_summary */
1697 NULL, /* write_optimization_summary */
1698 NULL, /* read_optimization_summary */
1699 NULL, /* stmt_fixup */
1700 0, /* function_transform_todo_flags_start */
1701 NULL, /* function_transform */
1702 NULL), /* variable_transform */
1703 init_p(false),
1704 function_insertion_hook_holder(NULL),
1705 node_duplication_hook_holder(NULL),
1706 node_removal_hook_holder(NULL)
1710 ipa_opt_pass_d *
1711 make_pass_ipa_pure_const (gcc::context *ctxt)
1713 return new pass_ipa_pure_const (ctxt);
1716 /* Return true if function should be skipped for local pure const analysis. */
1718 static bool
1719 skip_function_for_local_pure_const (struct cgraph_node *node)
1721 /* Because we do not schedule pass_fixup_cfg over whole program after early
1722 optimizations we must not promote functions that are called by already
1723 processed functions. */
1725 if (function_called_by_processed_nodes_p ())
1727 if (dump_file)
1728 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
1729 return true;
1731 /* Save some work and do not analyze functions which are interposable and
1732 do not have any non-interposable aliases. */
1733 if (node->get_availability () <= AVAIL_INTERPOSABLE
1734 && !node->has_aliases_p ())
1736 if (dump_file)
1737 fprintf (dump_file,
1738 "Function is interposable; not analyzing.\n");
1739 return true;
1741 return false;
1744 /* Simple local pass for pure const discovery reusing the analysis from
1745 ipa_pure_const. This pass is effective when executed together with
1746 other optimization passes in early optimization pass queue. */
1748 namespace {
/* Pass descriptor for the local (per-function GIMPLE) pure/const
   discovery pass; consumed by pass_local_pure_const below.  */
1750 const pass_data pass_data_local_pure_const =
1752 GIMPLE_PASS, /* type */
1753 "local-pure-const", /* name */
1754 OPTGROUP_NONE, /* optinfo_flags */
1755 TV_IPA_PURE_CONST, /* tv_id */
1756 0, /* properties_required */
1757 0, /* properties_provided */
1758 0, /* properties_destroyed */
1759 0, /* todo_flags_start */
1760 0, /* todo_flags_finish */
1763 class pass_local_pure_const : public gimple_opt_pass
1765 public:
1766 pass_local_pure_const (gcc::context *ctxt)
1767 : gimple_opt_pass (pass_data_local_pure_const, ctxt)
1770 /* opt_pass methods: */
1771 opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
1772 virtual bool gate (function *) { return gate_pure_const (); }
1773 virtual unsigned int execute (function *);
1775 }; // class pass_local_pure_const
/* Local (single-function) pure/const discovery: reuse analyze_function
   on the current function, do NORETURN discovery, and promote the decl
   to const/pure/nothrow when allowed.  Returns the TODO flags of
   execute_fixup_cfg when anything changed.  */
1777 unsigned int
1778 pass_local_pure_const::execute (function *fun)
1780 bool changed = false;
1781 funct_state l;
1782 bool skip;
1783 struct cgraph_node *node;
1785 node = cgraph_node::get (current_function_decl);
1786 skip = skip_function_for_local_pure_const (node);
/* Even when the decl must not be changed, keep analyzing if attribute
   suggestion warnings were requested.  */
1787 if (!warn_suggest_attribute_const
1788 && !warn_suggest_attribute_pure
1789 && skip)
1790 return 0;
1792 l = analyze_function (node, false);
1794 /* Do NORETURN discovery. */
/* No predecessor of the exit block means no path returns.  */
1795 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
1796 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
1798 warn_function_noreturn (fun->decl);
1799 if (dump_file)
1800 fprintf (dump_file, "Function found to be noreturn: %s\n",
1801 current_function_name ());
1803 /* Update declaration and reduce profile to executed once. */
1804 TREE_THIS_VOLATILE (current_function_decl) = 1;
1805 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
1806 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
1808 changed = true;
/* Promote the decl according to the locally computed state.  The
   "found to be" dumps fire even when SKIP prevents the update; the
   "updated" dumps fire only when the flag actually changed.  */
1811 switch (l->pure_const_state)
1813 case IPA_CONST:
1814 if (!TREE_READONLY (current_function_decl))
1816 warn_function_const (current_function_decl, !l->looping);
1817 if (dump_file)
1818 fprintf (dump_file, "Function found to be %sconst: %s\n",
1819 l->looping ? "looping " : "",
1820 current_function_name ());
1822 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1823 && !l->looping)
1825 if (dump_file)
1826 fprintf (dump_file, "Function found to be non-looping: %s\n",
1827 current_function_name ());
1829 if (!skip && node->set_const_flag (true, l->looping))
1831 if (dump_file)
1832 fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
1833 l->looping ? "looping " : "",
1834 current_function_name ());
1835 changed = true;
1837 break;
1839 case IPA_PURE:
1840 if (!DECL_PURE_P (current_function_decl))
1842 warn_function_pure (current_function_decl, !l->looping);
1843 if (dump_file)
1844 fprintf (dump_file, "Function found to be %spure: %s\n",
1845 l->looping ? "looping " : "",
1846 current_function_name ());
1848 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1849 && !l->looping)
1851 if (dump_file)
1852 fprintf (dump_file, "Function found to be non-looping: %s\n",
1853 current_function_name ());
1855 if (!skip && node->set_pure_flag (true, l->looping))
1857 if (dump_file)
1858 fprintf (dump_file, "Declaration updated to be %spure: %s\n",
1859 l->looping ? "looping " : "",
1860 current_function_name ());
1861 changed = true;
1863 break;
1865 default:
1866 break;
/* Nothrow promotion is not guarded by SKIP in this function.  */
1868 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1870 node->set_nothrow_flag (true);
1871 changed = true;
1872 if (dump_file)
1873 fprintf (dump_file, "Function found to be nothrow: %s\n",
1874 current_function_name ());
/* L was malloc'ed by analyze_function; release it here.  */
1876 free (l);
1877 if (changed)
1878 return execute_fixup_cfg ();
1879 else
1880 return 0;
1883 } // anon namespace
1885 gimple_opt_pass *
1886 make_pass_local_pure_const (gcc::context *ctxt)
1888 return new pass_local_pure_const (ctxt);
1891 /* Emit noreturn warnings. */
1893 namespace {
/* Pass descriptor for the -Wsuggest-attribute=noreturn warning pass;
   requires a CFG so exit-block predecessors can be inspected.  */
1895 const pass_data pass_data_warn_function_noreturn =
1897 GIMPLE_PASS, /* type */
1898 "*warn_function_noreturn", /* name */
1899 OPTGROUP_NONE, /* optinfo_flags */
1900 TV_NONE, /* tv_id */
1901 PROP_cfg, /* properties_required */
1902 0, /* properties_provided */
1903 0, /* properties_destroyed */
1904 0, /* todo_flags_start */
1905 0, /* todo_flags_finish */
1908 class pass_warn_function_noreturn : public gimple_opt_pass
1910 public:
1911 pass_warn_function_noreturn (gcc::context *ctxt)
1912 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
1915 /* opt_pass methods: */
1916 virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
1917 virtual unsigned int execute (function *fun)
1919 if (!TREE_THIS_VOLATILE (current_function_decl)
1920 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
1921 warn_function_noreturn (current_function_decl);
1922 return 0;
1925 }; // class pass_warn_function_noreturn
1927 } // anon namespace
1929 gimple_opt_pass *
1930 make_pass_warn_function_noreturn (gcc::context *ctxt)
1932 return new pass_warn_function_noreturn (ctxt);
1935 /* Simple local pass for nothrow discovery, scanning the function body
1936 for statements that can throw externally. This pass is effective when
1937 executed together with other optimization passes in early optimization pass queue. */
1939 namespace {
/* Pass descriptor for the early local nothrow discovery pass; consumed
   by pass_nothrow below.  */
1941 const pass_data pass_data_nothrow =
1943 GIMPLE_PASS, /* type */
1944 "nothrow", /* name */
1945 OPTGROUP_NONE, /* optinfo_flags */
1946 TV_IPA_PURE_CONST, /* tv_id */
1947 0, /* properties_required */
1948 0, /* properties_provided */
1949 0, /* properties_destroyed */
1950 0, /* todo_flags_start */
1951 0, /* todo_flags_finish */
1954 class pass_nothrow : public gimple_opt_pass
1956 public:
1957 pass_nothrow (gcc::context *ctxt)
1958 : gimple_opt_pass (pass_data_nothrow, ctxt)
1961 /* opt_pass methods: */
1962 opt_pass * clone () { return new pass_nothrow (m_ctxt); }
1963 virtual bool gate (function *) { return optimize; }
1964 virtual unsigned int execute (function *);
1966 }; // class pass_nothrow
1968 unsigned int
1969 pass_nothrow::execute (function *)
1971 struct cgraph_node *node;
1972 basic_block this_block;
1974 if (TREE_NOTHROW (current_function_decl))
1975 return 0;
1977 node = cgraph_node::get (current_function_decl);
1979 /* We run during lowering, we can not really use availability yet. */
1980 if (cgraph_node::get (current_function_decl)->get_availability ()
1981 <= AVAIL_INTERPOSABLE)
1983 if (dump_file)
1984 fprintf (dump_file, "Function is interposable;"
1985 " not analyzing.\n");
1986 return true;
1989 FOR_EACH_BB_FN (this_block, cfun)
1991 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
1992 !gsi_end_p (gsi);
1993 gsi_next (&gsi))
1994 if (stmt_can_throw_external (gsi_stmt (gsi)))
1996 if (is_gimple_call (gsi_stmt (gsi)))
1998 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
1999 if (callee_t && recursive_call_p (current_function_decl,
2000 callee_t))
2001 continue;
2004 if (dump_file)
2006 fprintf (dump_file, "Statement can throw: ");
2007 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
2009 return 0;
2013 node->set_nothrow_flag (true);
2015 bool cfg_changed = false;
2016 if (self_recursive_p (node))
2017 FOR_EACH_BB_FN (this_block, cfun)
2018 if (gimple *g = last_stmt (this_block))
2019 if (is_gimple_call (g))
2021 tree callee_t = gimple_call_fndecl (g);
2022 if (callee_t
2023 && recursive_call_p (current_function_decl, callee_t)
2024 && maybe_clean_eh_stmt (g)
2025 && gimple_purge_dead_eh_edges (this_block))
2026 cfg_changed = true;
2029 if (dump_file)
2030 fprintf (dump_file, "Function found to be nothrow: %s\n",
2031 current_function_name ());
2032 return cfg_changed ? TODO_cleanup_cfg : 0;
2035 } // anon namespace
2037 gimple_opt_pass *
2038 make_pass_nothrow (gcc::context *ctxt)
2040 return new pass_nothrow (ctxt);