/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with post inlined state.  The global sets are not prone
   to this problem since they are by definition transitive.  */
/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
#include "ssa.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

static const char *pure_const_names[3] = {"const", "pure", "neither"};
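
/* For illustration (hypothetical user code, not part of this file), the
   three lattice values correspond to:

     int sq (int x) { return x * x; }    // const: depends only on arguments
     int g;
     int get_g (void) { return g; }      // pure: also reads global memory
     int bump_g (void) { return ++g; }   // neither: writes global memory  */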
enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
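
/* Note (editorial summary of the propagation below): the malloc lattice is
   ordered STATE_MALLOC_TOP < STATE_MALLOC < STATE_MALLOC_BOTTOM and
   propagate_malloc moves states downwards with MAX.  TOP is the optimistic
   initial value for candidates found by malloc_candidate_p, MALLOC means the
   function returns a freshly allocated, uncaptured pointer, and BOTTOM means
   the function is known not to behave like malloc.  */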
/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;

  enum malloc_state_e malloc_state;
};

/* State used when we know nothing about function.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true, true, STATE_MALLOC_BOTTOM };
typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;

static bool gate_pure_const (void);
namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;

  /* Holders of ipa cgraph hooks: */
  struct cgraph_node_hook_list *function_insertion_hook_holder;
  struct cgraph_2node_hook_list *node_duplication_hook_holder;
  struct cgraph_node_hook_list *node_removal_hook_holder;

}; // class pass_ipa_pure_const

} // anon namespace
/* Try to guess if function body will always be visible to compiler
   when compiling the call and whether compiler will be able
   to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
	  || DECL_COMDAT (decl));
}
/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION, this function may initialize it and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? G_("function might be candidate for attribute %qs")
	      : G_("function might be candidate for attribute %qs"
		   " if it is known to return normally"), attrib_name);
  return warned_about;
}
/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about, "pure");
}
/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
			 known_finite, warned_about, "const");
}
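
/* As an example (hypothetical user code), compiling an externally
   visible function such as

     int twice (int x) { return 2 * x; }

   with -Wsuggest-attribute=const would emit the suggestion above,
   since the analysis finds it const but the attribute is missing and
   the body is not always visible to callers.  */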
/* Emit suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
			 false, warned_about, "malloc");
}
/* Emit suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  cgraph_node *node = cgraph_node::get (decl);
  if (node->instrumentation_clone)
    decl = node->instrumented_version->decl;

  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
			   true, warned_about, "noreturn");
}
void
warn_function_cold (tree decl)
{
  tree original_decl = decl;

  cgraph_node *node = cgraph_node::get (decl);
  if (node->instrumentation_clone)
    decl = node->instrumented_version->decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
			 true, warned_about, "cold");
}
/* Return true if we have a function state for NODE.  */

static inline bool
has_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    return false;
  return funct_state_vec[node->uid] != NULL;
}
/* Return the function state from NODE.  */

static inline funct_state
get_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid
      || !funct_state_vec[node->uid])
    /* We might want to put correct previously_known state into varying.  */
    return &varying_state;
  return funct_state_vec[node->uid];
}
/* Set the function state S for NODE.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    funct_state_vec.safe_grow_cleared (node->uid + 1);

  /* If funct_state_vec already contains a funct_state, we have to release
     it before it's going to be overwritten.  */
  if (funct_state_vec[node->uid] != NULL
      && funct_state_vec[node->uid] != &varying_state)
    free (funct_state_vec[node->uid]);

  funct_state_vec[node->uid] = s;
}
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "    global memory read is not const\n");
	  /* Just a regular read.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
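
/* For example (hypothetical user code), given

     int deref (int *p) { return *p; }

   the read of *p reaches check_op with CHECKING_WRITE false; since the
   pointer may alias global memory, the state drops from IPA_CONST to
   IPA_PURE, while a store such as *p = 0 would force IPA_NEITHER.
   Dereferences of pointers known not to alias global memory (e.g. local
   allocations) leave the state unchanged.  */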
/* Compute state based on ECF FLAGS and store to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
		  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING the better of the two variants.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}
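
/* For instance, if the body analysis found IPA_NEITHER but the user
   declared the function pure (STATE_PREVIOUSLY_KNOWN == IPA_PURE),
   better_state keeps IPA_PURE: since IPA_CONST < IPA_PURE < IPA_NEITHER
   in the lattice, it picks the smaller (stronger) of the two values.  */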
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING worse of the two variants.
   N is the actual node called.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now if this function will be detected as CONST however when interposed it
     may end up being just pure.  We always must assume the worst scenario here.
   */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->name ());
      state2 = IPA_PURE;
    }
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
/* Recognize special cases of builtins that are by themselves not pure or const
   but a function using them is.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
		       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
	*looping = false;
	*state = IPA_CONST;
	return true;
      case BUILT_IN_PREFETCH:
	*looping = true;
	*state = IPA_CONST;
	return true;
      default:
	break;
      }
  return false;
}
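
/* E.g. a function that uses __builtin_alloca only for scratch space
   (hypothetical user code)

     int f (int x) { char *buf = __builtin_alloca (x); buf[0] = 0; return x; }

   can still end up const: the builtins above only touch the local stack
   frame.  __builtin_prefetch is instead treated as const but looping, so
   calls may be CSEd but are never removed by DCE (see the comment on the
   looping flag above).  */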
/* Check the parameters of a function call to CALL to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also affect the purity.  CALL is the call
   statement for the entire call expression.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
	if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
	fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct functions calls are handled by IPA propagation.  */
}
712 /* Direct functions calls are handled by IPA propagation. */
715 /* Wrapper around check_decl for loads in local more. */
717 static bool
718 check_load (gimple *, tree op, tree, void *data)
720 if (DECL_P (op))
721 check_decl ((funct_state)data, op, false, false);
722 else
723 check_op ((funct_state)data, op, false);
724 return false;
727 /* Wrapper around check_decl for stores in local more. */
729 static bool
730 check_store (gimple *, tree op, tree, void *data)
732 if (DECL_P (op))
733 check_decl ((funct_state)data, op, true, false);
734 else
735 check_op ((funct_state)data, op, true);
736 return false;
739 /* Wrapper around check_decl for loads in ipa mode. */
741 static bool
742 check_ipa_load (gimple *, tree op, tree, void *data)
744 if (DECL_P (op))
745 check_decl ((funct_state)data, op, false, true);
746 else
747 check_op ((funct_state)data, op, false);
748 return false;
751 /* Wrapper around check_decl for stores in ipa mode. */
753 static bool
754 check_ipa_store (gimple *, tree op, tree, void *data)
756 if (DECL_P (op))
757 check_decl ((funct_state)data, op, true, true);
758 else
759 check_op ((funct_state)data, op, true);
760 return false;
/* Look into pointer pointed to by GSIP and figure out what interesting side
   effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobber as side effects before IPA, so that we would rather
     inline C++ destructors and keep clobber semantics than eliminate them.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here. */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}
/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is return value of the function and STMT is return stmt.  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
	tree op2 = gimple_cond_rhs (cond);
	if (!integer_zerop (op2))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
	enum tree_code code = gimple_assign_rhs_code (ga);
	if (TREE_CODE_CLASS (code) != tcc_comparison)
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
	if (!integer_zerop (gimple_assign_rhs2 (ga)))
	  RETURN_FROM_IMM_USE_STMT (use_iter, false);
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      RETURN_FROM_IMM_USE_STMT (use_iter, false);

  return true;
}
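
/* For example (hypothetical GIMPLE), in

     p_4 = foo ();
     if (p_4 == 0) ...
     return p_4;

   the uses of p_4 in the comparison against 0 and in the return stmt
   are accepted, whereas storing p_4 into a global or passing it to
   another call would make check_retval_uses return false.  */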
/* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
   attribute.  Currently this function does a very conservative analysis.
   FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and element of phi is either NULL or
      SSA_NAME_DEF_STMT(element) is function call.
   3) The return-value has immediate uses only within comparisons (gcond or gassign)
      and return_stmt (and likewise a phi arg has immediate use only within comparison
      or the phi stmt).  */
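
/* A typical candidate matching the phi case (hypothetical user code):

     void *my_alloc (size_t n)
     {
       void *p = malloc (n);
       if (!p)
	 return 0;
       return p;
     }

   where the returned SSA name is a phi of NULL and the result of a call
   to a malloc-like function.  */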
static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

#define DUMP_AND_RETURN(reason)  \
{ \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "%s", (reason));  \
  return false; \
}

  if (EDGE_COUNT (exit_block->preds) == 0)
    return false;

  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!check_retval_uses (retval, ret_stmt))
	DUMP_AND_RETURN("Return value has uses outside return stmt"
			" and comparisons against 0.")

      gimple *def = SSA_NAME_DEF_STMT (retval);
      if (gcall *call_stmt = dyn_cast<gcall *> (def))
	{
	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;

	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			    " non-ipa mode.")

	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get (cs);
	      gcc_assert (es);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      else if (gphi *phi = dyn_cast<gphi *> (def))
	for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	  {
	    tree arg = gimple_phi_arg_def (phi, i);
	    if (TREE_CODE (arg) != SSA_NAME)
	      DUMP_AND_RETURN("phi arg is not SSA_NAME.")
	    if (!(arg == null_pointer_node || check_retval_uses (arg, phi)))
	      DUMP_AND_RETURN("phi arg has uses outside phi"
			      " and comparisons against 0.")

	    gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	    gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	    if (!call_stmt)
	      return false;
	    tree callee_decl = gimple_call_fndecl (call_stmt);
	    if (!callee_decl)
	      return false;
	    if (!ipa && !DECL_IS_MALLOC (callee_decl))
	      DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			      " non-ipa mode.")

	    cgraph_edge *cs = node->get_edge (call_stmt);
	    if (cs)
	      {
		ipa_call_summary *es = ipa_call_summaries->get (cs);
		gcc_assert (es);
		es->is_return_callee_uncaptured = true;
	      }
	  }

      else
	DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;

#undef DUMP_AND_RETURN
}
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of possible infinite loop side
	 effect).  */
      if (mark_dfs_back_edges ())
	{
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that loop will
	     be proved to be finite in testcases such as in loop-15.c
	     and loop-24.c  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
		fprintf (dump_file, "    has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, "    can not prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping = true;
		    break;
		  }
	      scev_finalize ();
	    }
	  loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
	fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
	fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
	fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
	fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}
/* Called when new function is inserted to callgraph late.  */
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  if (opt_for_fn (node->decl, flag_ipa_pure_const))
    set_function_state (node, analyze_function (node, true));
}
/* Called when new clone is inserted to callgraph late.  */

static void
duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
		     void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (src))
    {
      funct_state l = XNEW (struct funct_state_d);
      gcc_assert (!has_function_state (dst));
      memcpy (l, get_function_state (src), sizeof (*l));
      set_function_state (dst, l);
    }
}
/* Called when a node is removed from the callgraph.  */

static void
remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (node))
    set_function_state (node, NULL);
}
void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  node_removal_hook_holder =
      symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
      symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
}
/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when the function is cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      set_function_state (node, analyze_function (node, true));
}
/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	{
	  struct bitpack_d bp;
	  funct_state fs;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  fs = get_function_state (node);

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      fs = XCNEW (struct funct_state_d);
	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));
	      set_function_state (node, fs);

	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
			= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file," function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file," function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file," function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file," function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
/* We only propagate across edges that can throw externally and their callee
   is not interposable.  */

static bool
ignore_edge_for_nothrow (struct cgraph_edge *e)
{
  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
    return true;

  enum availability avail;
  cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
								e->caller);
  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
    return true;
  return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
	 && !e->callee->binds_to_current_def_p (e->caller);
}
/* Return true if NODE is self recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}
/* Return true if N is a cdtor that is not const or pure.  In this case we may
   need to remove the unreachable function if it is marked const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
	    || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
  return false;
}
/* We only propagate across edges with non-interposable callee.  */

static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
  enum availability avail;
  e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  return (avail <= AVAIL_INTERPOSABLE);
}
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;
  bool has_cdtor;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
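  /* E.g. for a cycle of mutually recursive functions (hypothetical user
     code)

       int odd (int n) { return n ? even (n - 1) : 0; }
       int even (int n) { return n ? odd (n - 1) : 1; }

     both nodes collapse into one SCC and end up with the same state;
     the count > 1 check below also forces LOOPING for such cycles,
     since recursion is conservatively treated as possibly infinite.  */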
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = get_function_state (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
		     w->dump_name (),
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.
	     We are safe to pass NULL as FROM and TO because we will take care
	     of possible interposition when walking callees.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping,
		       NULL, NULL);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
	       e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "    Call to %s",
			   e->callee->dump_name ());
		}
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = get_function_state (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					      y->decl))
		;
	      else
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, e->caller, e->callee);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* Now process the indirect call.  */
	  for (ie = w->indirect_calls;
	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Indirect call");
	      state_from_flags (&edge_state, &edge_looping,
				ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref)
	       && pure_const_state != IPA_NEITHER; i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		case IPA_REF_CHKP:
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = get_function_state (w);

	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = get_function_state (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->name ());

	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->global.inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		/* Turning constructor or destructor to non-looping const/pure
		   enables us to possibly remove the function completely.  */
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_const_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_pure_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = get_function_state (w);

	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it can
		     not be interposed.
		     When callee is compiled with non-call exceptions we also
		     must check that the declaration is bound to current
		     body as other semantically equivalent body may still
		     throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (get_function_state (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->global.inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
/* Debugging function to dump state of malloc lattice.  */

DEBUG_FUNCTION
static void
dump_malloc_lattice (FILE *dump_file, const char *s)
{
  if (!dump_file)
    return;

  fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      funct_state fs = get_function_state (node);
      malloc_state_e state = fs->malloc_state;
      fprintf (dump_file, "%s: %s\n", node->name (), malloc_state_names[state]);
    }
}
/* Propagate malloc attribute across the callgraph.  */

static void
propagate_malloc (void)
{
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      if (DECL_IS_MALLOC (node->decl))
	if (!has_function_state (node))
	  {
	    funct_state l = XCNEW (struct funct_state_d);
	    *l = varying_state;
	    l->malloc_state = STATE_MALLOC;
	    set_function_state (node, l);
	  }
    }

  dump_malloc_lattice (dump_file, "Initial");
  struct cgraph_node **order
    = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos = ipa_reverse_postorder (order);
  bool changed = true;

  while (changed)
    {
      changed = false;
      /* Walk in postorder.  */
      for (int i = order_pos - 1; i >= 0; --i)
	{
	  cgraph_node *node = order[i];
	  if (node->alias
	      || !node->definition
	      || !has_function_state (node))
	    continue;

	  funct_state l = get_function_state (node);

	  /* FIXME: add support for indirect-calls.  */
	  if (node->indirect_calls)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (node->get_availability () <= AVAIL_INTERPOSABLE)
	    {
	      l->malloc_state = STATE_MALLOC_BOTTOM;
	      continue;
	    }

	  if (l->malloc_state == STATE_MALLOC_BOTTOM)
	    continue;

	  vec<cgraph_node *> callees = vNULL;
	  for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get (cs);
	      if (es && es->is_return_callee_uncaptured)
		callees.safe_push (cs->callee);
	    }

	  malloc_state_e new_state = l->malloc_state;
	  for (unsigned j = 0; j < callees.length (); j++)
	    {
	      cgraph_node *callee = callees[j];
	      if (!has_function_state (callee))
		{
		  new_state = STATE_MALLOC_BOTTOM;
		  break;
		}
	      malloc_state_e callee_state = get_function_state (callee)->malloc_state;
	      if (new_state < callee_state)
		new_state = callee_state;
	    }
	  if (new_state != l->malloc_state)
	    {
	      changed = true;
	      l->malloc_state = new_state;
	    }
	}
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    if (has_function_state (node))
      {
	funct_state l = get_function_state (node);
	if (!node->alias
	    && l->malloc_state == STATE_MALLOC
	    && !node->global.inlined_to)
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Function %s found to be malloc\n",
		       node->name ());

	    bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
	    node->set_malloc_flag (true);
	    if (!malloc_decl_p && warn_suggest_attribute_malloc)
	      warn_function_malloc (node->decl);
	  }
      }

  dump_malloc_lattice (dump_file, "after propagation");
  ipa_free_postorder_info ();
  free (order);
}
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */
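/* E.g. (hypothetical call chain): if f's own body is side-effect free and
   f calls only g, then once g is proven pure, f can be proven pure as
   well; the property flows transitively from callees to callers.  */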
unsigned int
pass_ipa_pure_const::
execute (function *)
{
  struct cgraph_node *node;
  bool remove_p;

  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);

  /* Nothrow discovery makes more functions not lead to a return,
     which improves the later analyses.  */
  propagate_nothrow ();
  propagate_malloc ();
  remove_p = propagate_pure_const ();

  /* Cleanup.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();

  /* In WPA we use inline summaries for the partitioning process.  */
  if (!flag_wpa)
    ipa_free_fn_summary ();
  return remove_p ? TODO_remove_functions : 0;
}
static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}

pass_ipa_pure_const::pass_ipa_pure_const (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_pure_const, ctxt,
		      pure_const_generate_summary, /* generate_summary */
		      pure_const_write_summary, /* write_summary */
		      pure_const_read_summary, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL), /* variable_transform */
  init_p (false),
  function_insertion_hook_holder (NULL),
  node_duplication_hook_holder (NULL),
  node_removal_hook_holder (NULL)
{
}

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}
/* Return true if function should be skipped for local pure-const
   analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over the whole program after
     early optimizations, we must not promote functions that are called by
     already-processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable and
     do not have any non-interposable aliases.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !node->has_aliases_p ())
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}
/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */

namespace {

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
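  /* If the exit block has no incoming edges, no execution path reaches a
     return, so the function can safely be treated as noreturn (unless its
     declaration is already marked volatile, i.e. noreturn).  */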
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }

  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
} // anon namespace

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}

/* Emit noreturn warnings.  */

namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}
/* Simple local pass for nothrow discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */

namespace {

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow
unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering; we cannot really use availability yet.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
	fprintf (dump_file, "Function is interposable;"
		 " not analyzing.\n");
      return 0;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	if (stmt_can_throw_external (gsi_stmt (gsi)))
	  {
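	    /* A recursive call back into the current function cannot throw
	       externally once the function itself is proven nothrow, so
	       such calls are ignored here; their then-dead EH edges are
	       purged after the flag is set below.  */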
	    if (is_gimple_call (gsi_stmt (gsi)))
	      {
		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
		if (callee_t && recursive_call_p (current_function_decl,
						  callee_t))
		  continue;
	      }

	    if (dump_file)
	      {
		fprintf (dump_file, "Statement can throw: ");
		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
	      }
	    return 0;
	  }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gimple *g = last_stmt (this_block))
	if (is_gimple_call (g))
	  {
	    tree callee_t = gimple_call_fndecl (g);
	    if (callee_t
		&& recursive_call_p (current_function_decl, callee_t)
		&& maybe_clean_eh_stmt (g)
		&& gimple_purge_dead_eh_edges (this_block))
	      cfg_changed = true;
	  }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
	     current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}