1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2018 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
25 This must be run after inlining decisions have been made, since
26 otherwise the local sets will not contain information that is
27 consistent with the post-inlining state. The global sets are not
28 prone to this problem since they are by definition transitive. */
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since its information is used by
32 the rest of the compilation. */
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "tree.h"
40 #include "gimple.h"
41 #include "tree-pass.h"
42 #include "tree-streamer.h"
43 #include "cgraph.h"
44 #include "diagnostic.h"
45 #include "calls.h"
46 #include "cfganal.h"
47 #include "tree-eh.h"
48 #include "gimple-iterator.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-ssa-loop-niter.h"
52 #include "langhooks.h"
53 #include "ipa-utils.h"
54 #include "gimple-pretty-print.h"
55 #include "cfgloop.h"
56 #include "tree-scalar-evolution.h"
57 #include "intl.h"
58 #include "opts.h"
59 #include "ssa.h"
60 #include "alloc-pool.h"
61 #include "symbol-summary.h"
62 #include "ipa-prop.h"
63 #include "ipa-fnsummary.h"
65 /* Lattice values for const and pure functions. Everything starts out
66 being const, then may drop to pure and then neither depending on
67 what is found. */
68 enum pure_const_state_e
70 IPA_CONST,
71 IPA_PURE,
72 IPA_NEITHER
75 static const char *pure_const_names[3] = {"const", "pure", "neither"};
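/* A minimal illustration of the lattice (hypothetical user code, not
   part of this file): given a file-scope "int g;",

     int sq (int x) { return x * x; }   // no memory effects: IPA_CONST
     int rd (void)  { return g; }       // reads global memory: IPA_PURE
     int wr (void)  { return g++; }     // writes global memory: IPA_NEITHER

   Every function starts at IPA_CONST and is demoted as the analysis
   below finds loads, stores or calls that rule the stronger state
   out.  */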
77 enum malloc_state_e
79 STATE_MALLOC_TOP,
80 STATE_MALLOC,
81 STATE_MALLOC_BOTTOM
84 static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
86 /* Holder for the const_state. There is one of these per function
87 decl. */
88 class funct_state_d
90 public:
91 funct_state_d (): pure_const_state (IPA_NEITHER),
92 state_previously_known (IPA_NEITHER), looping_previously_known (true),
93 looping (true), can_throw (true), can_free (true),
94 malloc_state (STATE_MALLOC_BOTTOM) {}
96 funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
97 state_previously_known (s.state_previously_known),
98 looping_previously_known (s.looping_previously_known),
99 looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
100 malloc_state (s.malloc_state) {}
102 /* See above. */
103 enum pure_const_state_e pure_const_state;
104 /* What the user set here; we can always be sure about this. */
105 enum pure_const_state_e state_previously_known;
106 bool looping_previously_known;
108 /* True if the function could possibly loop forever. There are a
109 lot of ways that this could be determined. We are pretty
110 conservative here. While it is possible to CSE pure and const
111 calls, it is not legal to have DCE remove a call if there
112 is any possibility that the call could loop forever, since that is
113 a behavioral change. */
114 bool looping;
116 bool can_throw;
118 /* True if the function can call free, munmap or otherwise make
119 previously non-trapping memory accesses trapping. */
120 bool can_free;
122 enum malloc_state_e malloc_state;
125 typedef struct funct_state_d * funct_state;
127 /* The storage of the funct_state is abstracted because there is the
128 possibility that it may be desirable to move this to the cgraph
129 local info. */
131 class funct_state_summary_t: public function_summary <funct_state_d *>
133 public:
134 funct_state_summary_t (symbol_table *symtab):
135 function_summary <funct_state_d *> (symtab) {}
137 virtual void insert (cgraph_node *, funct_state_d *state);
138 virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
139 funct_state_d *src_data,
140 funct_state_d *dst_data);
143 static funct_state_summary_t *funct_state_summaries = NULL;
145 static bool gate_pure_const (void);
147 namespace {
149 const pass_data pass_data_ipa_pure_const =
151 IPA_PASS, /* type */
152 "pure-const", /* name */
153 OPTGROUP_NONE, /* optinfo_flags */
154 TV_IPA_PURE_CONST, /* tv_id */
155 0, /* properties_required */
156 0, /* properties_provided */
157 0, /* properties_destroyed */
158 0, /* todo_flags_start */
159 0, /* todo_flags_finish */
162 class pass_ipa_pure_const : public ipa_opt_pass_d
164 public:
165 pass_ipa_pure_const(gcc::context *ctxt);
167 /* opt_pass methods: */
168 bool gate (function *) { return gate_pure_const (); }
169 unsigned int execute (function *fun);
171 void register_hooks (void);
173 private:
174 bool init_p;
175 }; // class pass_ipa_pure_const
177 } // anon namespace
179 /* Try to guess if the function body will always be visible to the
180 compiler when compiling the call and whether the compiler will be
181 able to propagate the information by itself. */
183 static bool
184 function_always_visible_to_compiler_p (tree decl)
186 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
187 || DECL_COMDAT (decl));
190 /* Emit a suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
191 is true if the function is known to be finite. The diagnostic is
192 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
193 OPTION; this function may initialize it, and it is always returned
194 by the function. */
196 static hash_set<tree> *
197 suggest_attribute (int option, tree decl, bool known_finite,
198 hash_set<tree> *warned_about,
199 const char * attrib_name)
201 if (!option_enabled (option, &global_options))
202 return warned_about;
203 if (TREE_THIS_VOLATILE (decl)
204 || (known_finite && function_always_visible_to_compiler_p (decl)))
205 return warned_about;
207 if (!warned_about)
208 warned_about = new hash_set<tree>;
209 if (warned_about->contains (decl))
210 return warned_about;
211 warned_about->add (decl);
212 warning_at (DECL_SOURCE_LOCATION (decl),
213 option,
214 known_finite
215 ? G_("function might be candidate for attribute %qs")
216 : G_("function might be candidate for attribute %qs"
217 " if it is known to return normally"), attrib_name);
218 return warned_about;
221 /* Emit suggestion about __attribute__((pure)) for DECL. KNOWN_FINITE
222 is true if the function is known to be finite. */
224 static void
225 warn_function_pure (tree decl, bool known_finite)
227 /* Declaring a void function pure makes no sense and is diagnosed
228 by -Wattributes because calling it would have no effect. */
229 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
230 return;
232 static hash_set<tree> *warned_about;
233 warned_about
234 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
235 known_finite, warned_about, "pure");
238 /* Emit suggestion about __attribute__((const)) for DECL. KNOWN_FINITE
239 is true if the function is known to be finite. */
241 static void
242 warn_function_const (tree decl, bool known_finite)
244 /* Declaring a void function const makes no sense and is diagnosed
245 by -Wattributes because calling it would have no effect. */
246 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
247 return;
249 static hash_set<tree> *warned_about;
250 warned_about
251 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
252 known_finite, warned_about, "const");
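/* Illustration (hypothetical user code): compiling

     int g;
     int rd (void) { return g; }

   with optimization and -Wsuggest-attribute=pure leads the IPA pass
   to call warn_function_pure for rd, producing "function might be
   candidate for attribute 'pure'" at rd's declaration; the const
   variant above behaves analogously.  */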
255 /* Emit suggestion about __attribute__((malloc)) for DECL. */
257 static void
258 warn_function_malloc (tree decl)
260 static hash_set<tree> *warned_about;
261 warned_about
262 = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
263 true, warned_about, "malloc");
266 /* Emit suggestion about __attribute__((noreturn)) for DECL. */
268 static void
269 warn_function_noreturn (tree decl)
271 tree original_decl = decl;
273 static hash_set<tree> *warned_about;
274 if (!lang_hooks.missing_noreturn_ok_p (decl)
275 && targetm.warn_func_return (decl))
276 warned_about
277 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
278 true, warned_about, "noreturn");
281 void
282 warn_function_cold (tree decl)
284 tree original_decl = decl;
286 static hash_set<tree> *warned_about;
287 warned_about
288 = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
289 true, warned_about, "cold");
292 /* Check to see if the use (or definition, when CHECKING_WRITE is true)
293 of variable T is legal in a function that is either pure or const. */
295 static inline void
296 check_decl (funct_state local,
297 tree t, bool checking_write, bool ipa)
299 /* Do not want to do anything with volatile except mark any
300 function that uses one to be not const or pure. */
301 if (TREE_THIS_VOLATILE (t))
303 local->pure_const_state = IPA_NEITHER;
304 if (dump_file)
305 fprintf (dump_file, " Volatile operand is not const/pure\n");
306 return;
309 /* Do not care about a local automatic that is not static. */
310 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
311 return;
313 /* If the variable has the "used" attribute, treat it as if it had
314 been touched by the devil. */
315 if (DECL_PRESERVE_P (t))
317 local->pure_const_state = IPA_NEITHER;
318 if (dump_file)
319 fprintf (dump_file, " Used static/global variable is not const/pure\n");
320 return;
323 /* In IPA mode we are not interested in checking actual loads and stores;
324 they will be processed at propagation time using ipa_ref. */
325 if (ipa)
326 return;
328 /* Since we have dealt with the locals and params cases above, if we
329 are CHECKING_WRITE, this cannot be a pure or constant
330 function. */
331 if (checking_write)
333 local->pure_const_state = IPA_NEITHER;
334 if (dump_file)
335 fprintf (dump_file, " static/global memory write is not const/pure\n");
336 return;
339 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
341 /* Readonly reads are safe. */
342 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
343 return; /* Read of a constant, do not change the function state. */
344 else
346 if (dump_file)
347 fprintf (dump_file, " global memory read is not const\n");
348 /* Just a regular read. */
349 if (local->pure_const_state == IPA_CONST)
350 local->pure_const_state = IPA_PURE;
353 else
355 /* Compilation level statics can be read if they are readonly
356 variables. */
357 if (TREE_READONLY (t))
358 return;
360 if (dump_file)
361 fprintf (dump_file, " static memory read is not const\n");
362 /* Just a regular read. */
363 if (local->pure_const_state == IPA_CONST)
364 local->pure_const_state = IPA_PURE;
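/* Illustration (hypothetical user code) of the demotions above:

     static const int k = 42;
     static int s;

     int f (void) { return k; }   // read of a readonly static: state kept
     int h (void) { return s; }   // read of a mutable static: const -> pure

   A store to s would instead have forced IPA_NEITHER earlier in
   check_decl.  */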
369 /* Check to see if the use (or definition, when CHECKING_WRITE is true)
370 of operand T is legal in a function that is either pure or const. */
372 static inline void
373 check_op (funct_state local, tree t, bool checking_write)
375 t = get_base_address (t);
376 if (t && TREE_THIS_VOLATILE (t))
378 local->pure_const_state = IPA_NEITHER;
379 if (dump_file)
380 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
381 return;
383 else if (t
384 && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
385 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
386 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
388 if (dump_file)
389 fprintf (dump_file, " Indirect ref to local memory is OK\n");
390 return;
392 else if (checking_write)
394 local->pure_const_state = IPA_NEITHER;
395 if (dump_file)
396 fprintf (dump_file, " Indirect ref write is not const/pure\n");
397 return;
399 else
401 if (dump_file)
402 fprintf (dump_file, " Indirect ref read is not const\n");
403 if (local->pure_const_state == IPA_CONST)
404 local->pure_const_state = IPA_PURE;
408 /* Compute the state based on ECF FLAGS and store it to STATE and LOOPING. */
410 static void
411 state_from_flags (enum pure_const_state_e *state, bool *looping,
412 int flags, bool cannot_lead_to_return)
414 *looping = false;
415 if (flags & ECF_LOOPING_CONST_OR_PURE)
417 *looping = true;
418 if (dump_file && (dump_flags & TDF_DETAILS))
419 fprintf (dump_file, " looping\n");
421 if (flags & ECF_CONST)
423 *state = IPA_CONST;
424 if (dump_file && (dump_flags & TDF_DETAILS))
425 fprintf (dump_file, " const\n");
427 else if (flags & ECF_PURE)
429 *state = IPA_PURE;
430 if (dump_file && (dump_flags & TDF_DETAILS))
431 fprintf (dump_file, " pure\n");
433 else if (cannot_lead_to_return)
435 *state = IPA_PURE;
436 *looping = true;
437 if (dump_file && (dump_flags & TDF_DETAILS))
438 fprintf (dump_file, " ignoring side effects->pure looping\n");
440 else
442 if (dump_file && (dump_flags & TDF_DETAILS))
443 fprintf (dump_file, " neither\n");
444 *state = IPA_NEITHER;
445 *looping = true;
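/* Illustration: for a declaration such as

     extern int isqrt (int) __attribute__ ((const));

   flags_from_decl_or_type returns ECF_CONST, so state_from_flags
   yields IPA_CONST with *looping false; ECF_PURE yields IPA_PURE;
   with no such flags the result is IPA_NEITHER, except that a call
   which cannot lead to a return is treated as looping IPA_PURE.  */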
449 /* Merge STATE and STATE2 and LOOPING and LOOPING2, storing the
450 better of the two variants into STATE and LOOPING.
451 Be sure to merge looping correctly: IPA_NEITHER functions
452 have looping 0 even if they don't have to return. */
454 static inline void
455 better_state (enum pure_const_state_e *state, bool *looping,
456 enum pure_const_state_e state2, bool looping2)
458 if (state2 < *state)
460 if (*state == IPA_NEITHER)
461 *looping = looping2;
462 else
463 *looping = MIN (*looping, looping2);
464 *state = state2;
466 else if (state2 != IPA_NEITHER)
467 *looping = MIN (*looping, looping2);
470 /* Merge STATE and STATE2 and LOOPING and LOOPING2, storing the
471 worse of the two variants into STATE and LOOPING. FROM and TO are
472 the caller and callee used for the interposition check below. */
474 static inline void
475 worse_state (enum pure_const_state_e *state, bool *looping,
476 enum pure_const_state_e state2, bool looping2,
477 struct symtab_node *from,
478 struct symtab_node *to)
480 /* Consider function:
482 bool a(int *p)
483 {
484 return *p==*p;
485 }
487 During early optimization we will turn this into:
489 bool a(int *p)
490 {
491 return true;
492 }
494 Now this function will be detected as CONST; however, when interposed it
495 may end up being just pure. We must always assume the worst scenario here.
496 */
497 if (*state == IPA_CONST && state2 == IPA_CONST
498 && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
500 if (dump_file && (dump_flags & TDF_DETAILS))
501 fprintf (dump_file, "Dropping state to PURE because call to %s may not "
502 "bind to current def.\n", to->name ());
503 state2 = IPA_PURE;
505 *state = MAX (*state, state2);
506 *looping = MAX (*looping, looping2);
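/* Illustration (values only): starting from

     state = IPA_CONST; looping = false;
     worse_state (&state, &looping, IPA_PURE, true, NULL, NULL);

   leaves state == IPA_PURE and looping == true. better_state moves in
   the opposite direction and is used, e.g., to prefer the state
   previously known from attributes over a more pessimistic analysis
   result.  */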
509 /* Recognize special cases of builtins that are by themselves not pure or const
510 but functions using them are. */
511 static bool
512 special_builtin_state (enum pure_const_state_e *state, bool *looping,
513 tree callee)
515 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
516 switch (DECL_FUNCTION_CODE (callee))
518 case BUILT_IN_RETURN:
519 case BUILT_IN_UNREACHABLE:
520 CASE_BUILT_IN_ALLOCA:
521 case BUILT_IN_STACK_SAVE:
522 case BUILT_IN_STACK_RESTORE:
523 case BUILT_IN_EH_POINTER:
524 case BUILT_IN_EH_FILTER:
525 case BUILT_IN_UNWIND_RESUME:
526 case BUILT_IN_CXA_END_CLEANUP:
527 case BUILT_IN_EH_COPY_VALUES:
528 case BUILT_IN_FRAME_ADDRESS:
529 case BUILT_IN_APPLY:
530 case BUILT_IN_APPLY_ARGS:
531 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
532 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
533 *looping = false;
534 *state = IPA_CONST;
535 return true;
536 case BUILT_IN_PREFETCH:
537 *looping = true;
538 *state = IPA_CONST;
539 return true;
540 default:
541 break;
543 return false;
546 /* Check the parameters of the function call CALL to see if
547 there are any references in the parameters that are not allowed for
548 pure or const functions. Also check to see if this is either an
549 indirect call, a call outside the compilation unit, or has special
550 attributes that may also affect the purity. CALL is the call
551 statement for the entire call expression. */
553 static void
554 check_call (funct_state local, gcall *call, bool ipa)
556 int flags = gimple_call_flags (call);
557 tree callee_t = gimple_call_fndecl (call);
558 bool possibly_throws = stmt_could_throw_p (call);
559 bool possibly_throws_externally = (possibly_throws
560 && stmt_can_throw_external (call));
562 if (possibly_throws)
564 unsigned int i;
565 for (i = 0; i < gimple_num_ops (call); i++)
566 if (gimple_op (call, i)
567 && tree_could_throw_p (gimple_op (call, i)))
569 if (possibly_throws && cfun->can_throw_non_call_exceptions)
571 if (dump_file)
572 fprintf (dump_file, " operand can throw; looping\n");
573 local->looping = true;
575 if (possibly_throws_externally)
577 if (dump_file)
578 fprintf (dump_file, " operand can throw externally\n");
579 local->can_throw = true;
584 /* The const and pure flags are set by a variety of places in the
585 compiler (including here). If someone has already set the flags
586 for the callee (such as for some of the builtins), we will use
587 them; otherwise we will compute our own information.
589 Const and pure functions have less clobber effects than other
590 functions so we process these first. Otherwise if it is a call
591 outside the compilation unit or an indirect call we punt. This
592 leaves local calls which will be processed by following the call
593 graph. */
594 if (callee_t)
596 enum pure_const_state_e call_state;
597 bool call_looping;
599 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
600 && !nonfreeing_call_p (call))
601 local->can_free = true;
603 if (special_builtin_state (&call_state, &call_looping, callee_t))
605 worse_state (&local->pure_const_state, &local->looping,
606 call_state, call_looping,
607 NULL, NULL);
608 return;
610 /* When bad things happen to bad functions, they cannot be const
611 or pure. */
612 if (setjmp_call_p (callee_t))
614 if (dump_file)
615 fprintf (dump_file, " setjmp is not const/pure\n");
616 local->looping = true;
617 local->pure_const_state = IPA_NEITHER;
620 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
621 switch (DECL_FUNCTION_CODE (callee_t))
623 case BUILT_IN_LONGJMP:
624 case BUILT_IN_NONLOCAL_GOTO:
625 if (dump_file)
626 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
627 local->pure_const_state = IPA_NEITHER;
628 local->looping = true;
629 break;
630 default:
631 break;
634 else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
635 local->can_free = true;
637 /* When not in IPA mode, we can still handle self recursion. */
638 if (!ipa && callee_t
639 && recursive_call_p (current_function_decl, callee_t))
641 if (dump_file)
642 fprintf (dump_file, " Recursive call can loop.\n");
643 local->looping = true;
645 /* Either the callee is unknown or we are doing local analysis.
646 Look to see if there are any bits available for the callee (such as by
647 declaration or because it is a builtin) and process solely on the basis
648 of those bits. Always handle internal calls, since those calls don't
649 have corresponding cgraph edges and thus aren't processed during
650 the propagation. */
651 else if (!ipa || gimple_call_internal_p (call))
653 enum pure_const_state_e call_state;
654 bool call_looping;
655 if (possibly_throws && cfun->can_throw_non_call_exceptions)
657 if (dump_file)
658 fprintf (dump_file, " can throw; looping\n");
659 local->looping = true;
661 if (possibly_throws_externally)
663 if (dump_file)
665 fprintf (dump_file, " can throw externally to lp %i\n",
666 lookup_stmt_eh_lp (call));
667 if (callee_t)
668 fprintf (dump_file, " callee:%s\n",
669 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
671 local->can_throw = true;
673 if (dump_file && (dump_flags & TDF_DETAILS))
674 fprintf (dump_file, " checking flags for call:");
675 state_from_flags (&call_state, &call_looping, flags,
676 ((flags & (ECF_NORETURN | ECF_NOTHROW))
677 == (ECF_NORETURN | ECF_NOTHROW))
678 || (!flag_exceptions && (flags & ECF_NORETURN)));
679 worse_state (&local->pure_const_state, &local->looping,
680 call_state, call_looping, NULL, NULL);
682 /* Direct function calls are handled by IPA propagation. */
685 /* Wrapper around check_decl for loads in local mode. */
687 static bool
688 check_load (gimple *, tree op, tree, void *data)
690 if (DECL_P (op))
691 check_decl ((funct_state)data, op, false, false);
692 else
693 check_op ((funct_state)data, op, false);
694 return false;
697 /* Wrapper around check_decl for stores in local mode. */
699 static bool
700 check_store (gimple *, tree op, tree, void *data)
702 if (DECL_P (op))
703 check_decl ((funct_state)data, op, true, false);
704 else
705 check_op ((funct_state)data, op, true);
706 return false;
709 /* Wrapper around check_decl for loads in ipa mode. */
711 static bool
712 check_ipa_load (gimple *, tree op, tree, void *data)
714 if (DECL_P (op))
715 check_decl ((funct_state)data, op, false, true);
716 else
717 check_op ((funct_state)data, op, false);
718 return false;
721 /* Wrapper around check_decl for stores in ipa mode. */
723 static bool
724 check_ipa_store (gimple *, tree op, tree, void *data)
726 if (DECL_P (op))
727 check_decl ((funct_state)data, op, true, true);
728 else
729 check_op ((funct_state)data, op, true);
730 return false;
733 /* Look at the statement pointed to by GSIP and figure out what
734 interesting side effects it has. */
735 static void
736 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
738 gimple *stmt = gsi_stmt (*gsip);
740 if (is_gimple_debug (stmt))
741 return;
743 /* Do consider clobbers as side effects before IPA, so that we would rather
744 inline C++ destructors and keep clobber semantics than eliminate them.
746 TODO: We may get smarter during early optimizations on these and let
747 functions containing only clobbers be optimized more. This is a common
748 case of C++ destructors. */
750 if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
751 return;
753 if (dump_file)
755 fprintf (dump_file, " scanning: ");
756 print_gimple_stmt (dump_file, stmt, 0);
759 if (gimple_has_volatile_ops (stmt)
760 && !gimple_clobber_p (stmt))
762 local->pure_const_state = IPA_NEITHER;
763 if (dump_file)
764 fprintf (dump_file, " Volatile stmt is not const/pure\n");
767 /* Look for loads and stores. */
768 walk_stmt_load_store_ops (stmt, local,
769 ipa ? check_ipa_load : check_load,
770 ipa ? check_ipa_store : check_store);
772 if (gimple_code (stmt) != GIMPLE_CALL
773 && stmt_could_throw_p (stmt))
775 if (cfun->can_throw_non_call_exceptions)
777 if (dump_file)
778 fprintf (dump_file, " can throw; looping\n");
779 local->looping = true;
781 if (stmt_can_throw_external (stmt))
783 if (dump_file)
784 fprintf (dump_file, " can throw externally\n");
785 local->can_throw = true;
787 else
788 if (dump_file)
789 fprintf (dump_file, " can throw\n");
791 switch (gimple_code (stmt))
793 case GIMPLE_CALL:
794 check_call (local, as_a <gcall *> (stmt), ipa);
795 break;
796 case GIMPLE_LABEL:
797 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
798 /* Target of long jump. */
800 if (dump_file)
801 fprintf (dump_file, " nonlocal label is not const/pure\n");
802 local->pure_const_state = IPA_NEITHER;
804 break;
805 case GIMPLE_ASM:
806 if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
808 if (dump_file)
809 fprintf (dump_file, " memory asm clobber is not const/pure\n");
810 /* Abandon all hope, ye who enter here. */
811 local->pure_const_state = IPA_NEITHER;
812 local->can_free = true;
814 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
816 if (dump_file)
817 fprintf (dump_file, " volatile is not const/pure\n");
818 /* Abandon all hope, ye who enter here. */
819 local->pure_const_state = IPA_NEITHER;
820 local->looping = true;
821 local->can_free = true;
823 return;
824 default:
825 break;
829 /* Check that RETVAL is used only in STMT and in comparisons against 0.
830 RETVAL is the return value of the function and STMT is the return stmt. */
832 static bool
833 check_retval_uses (tree retval, gimple *stmt)
835 imm_use_iterator use_iter;
836 gimple *use_stmt;
838 FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
839 if (gcond *cond = dyn_cast<gcond *> (use_stmt))
841 tree op2 = gimple_cond_rhs (cond);
842 if (!integer_zerop (op2))
843 RETURN_FROM_IMM_USE_STMT (use_iter, false);
845 else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
847 enum tree_code code = gimple_assign_rhs_code (ga);
848 if (TREE_CODE_CLASS (code) != tcc_comparison)
849 RETURN_FROM_IMM_USE_STMT (use_iter, false);
850 if (!integer_zerop (gimple_assign_rhs2 (ga)))
851 RETURN_FROM_IMM_USE_STMT (use_iter, false);
853 else if (is_gimple_debug (use_stmt))
855 else if (use_stmt != stmt)
856 RETURN_FROM_IMM_USE_STMT (use_iter, false);
858 return true;
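/* Illustration (hypothetical GIMPLE-level view): for a return value p
   defined by

     p = malloc (n);
     if (p == 0) goto fail;
     return p;

   the only uses of p are the comparison against 0 and the return
   statement itself, so check_retval_uses returns true; any other use,
   e.g. "q = p + 4", makes it return false.  */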
861 /* malloc_candidate_p() checks if FUN can possibly be annotated with the
862 malloc attribute. Currently this function does a very conservative analysis.
863 FUN is considered to be a candidate if
864 1) It returns a value of pointer type.
865 2) SSA_NAME_DEF_STMT (return_value) is either a function call or
866 a phi, and every element of the phi is either NULL or its
867 SSA_NAME_DEF_STMT is a function call.
868 3) The return value has immediate uses only within comparisons (gcond or
869 gassign) and the return stmt (and likewise a phi arg has immediate uses
870 only within a comparison or the phi stmt). */
872 static bool
873 malloc_candidate_p (function *fun, bool ipa)
875 basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
876 edge e;
877 edge_iterator ei;
878 cgraph_node *node = cgraph_node::get_create (fun->decl);
880 #define DUMP_AND_RETURN(reason) \
881 { \
882 if (dump_file && (dump_flags & TDF_DETAILS)) \
883 fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
884 (node->name()), (reason)); \
885 return false; \
886 }
888 if (EDGE_COUNT (exit_block->preds) == 0
889 || !flag_delete_null_pointer_checks)
890 return false;
892 FOR_EACH_EDGE (e, ei, exit_block->preds)
894 gimple_stmt_iterator gsi = gsi_last_bb (e->src);
895 greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));
897 if (!ret_stmt)
898 return false;
900 tree retval = gimple_return_retval (ret_stmt);
901 if (!retval)
902 DUMP_AND_RETURN("No return value.")
904 if (TREE_CODE (retval) != SSA_NAME
905 || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
906 DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
908 if (!check_retval_uses (retval, ret_stmt))
909 DUMP_AND_RETURN("Return value has uses outside return stmt"
910 " and comparisons against 0.")
912 gimple *def = SSA_NAME_DEF_STMT (retval);
913 if (gcall *call_stmt = dyn_cast<gcall *> (def))
915 tree callee_decl = gimple_call_fndecl (call_stmt);
916 if (!callee_decl)
917 return false;
919 if (!ipa && !DECL_IS_MALLOC (callee_decl))
920 DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
921 " non-ipa mode.")
923 cgraph_edge *cs = node->get_edge (call_stmt);
924 if (cs)
926 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
927 gcc_assert (es);
928 es->is_return_callee_uncaptured = true;
932 else if (gphi *phi = dyn_cast<gphi *> (def))
934 bool all_args_zero = true;
935 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
937 tree arg = gimple_phi_arg_def (phi, i);
938 if (integer_zerop (arg))
939 continue;
941 all_args_zero = false;
942 if (TREE_CODE (arg) != SSA_NAME)
943 DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
944 if (!check_retval_uses (arg, phi))
945 DUMP_AND_RETURN ("phi arg has uses outside phi"
946 " and comparisons against 0.")
948 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
949 gcall *call_stmt = dyn_cast<gcall *> (arg_def);
950 if (!call_stmt)
951 return false;
952 tree callee_decl = gimple_call_fndecl (call_stmt);
953 if (!callee_decl)
954 return false;
955 if (!ipa && !DECL_IS_MALLOC (callee_decl))
956 DUMP_AND_RETURN("callee_decl does not have malloc attribute"
957 " for non-ipa mode.")
959 cgraph_edge *cs = node->get_edge (call_stmt);
960 if (cs)
962 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
963 gcc_assert (es);
964 es->is_return_callee_uncaptured = true;
968 if (all_args_zero)
969 DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.");
972 else
973 DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
976 if (dump_file && (dump_flags & TDF_DETAILS))
977 fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
978 IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
979 return true;
981 #undef DUMP_AND_RETURN
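/* Illustration (hypothetical user code) of a recognized candidate:

     void *xmalloc (size_t n)
     {
       void *p = malloc (n);
       if (p == 0)
         abort ();
       return p;
     }

   The return value is an SSA name of pointer type defined by a call to
   a known-malloc function, and its only other use is the comparison
   against 0, so with -fdelete-null-pointer-checks the function is a
   candidate for the malloc attribute.  */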
985 /* This is the main routine for finding the reference patterns for
986 global variables within a function FN. */
988 static funct_state
989 analyze_function (struct cgraph_node *fn, bool ipa)
991 tree decl = fn->decl;
992 funct_state l;
993 basic_block this_block;
995 l = XCNEW (struct funct_state_d);
996 l->pure_const_state = IPA_CONST;
997 l->state_previously_known = IPA_NEITHER;
998 l->looping_previously_known = true;
999 l->looping = false;
1000 l->can_throw = false;
1001 l->can_free = false;
1002 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
1003 flags_from_decl_or_type (fn->decl),
1004 fn->cannot_return_p ());
1006 if (fn->thunk.thunk_p || fn->alias)
1008 /* Thunk gets propagated through, so nothing interesting happens. */
1009 gcc_assert (ipa);
1010 if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
1011 l->pure_const_state = IPA_NEITHER;
1012 return l;
1015 if (dump_file)
1017 fprintf (dump_file, "\n\n local analysis of %s\n ",
1018 fn->name ());
1021 push_cfun (DECL_STRUCT_FUNCTION (decl));
1023 FOR_EACH_BB_FN (this_block, cfun)
1025 gimple_stmt_iterator gsi;
1026 struct walk_stmt_info wi;
1028 memset (&wi, 0, sizeof (wi));
1029 for (gsi = gsi_start_bb (this_block);
1030 !gsi_end_p (gsi);
1031 gsi_next (&gsi))
1033 check_stmt (&gsi, l, ipa);
1034 if (l->pure_const_state == IPA_NEITHER
1035 && l->looping
1036 && l->can_throw
1037 && l->can_free)
1038 goto end;
1042 end:
1043 if (l->pure_const_state != IPA_NEITHER)
1045 /* Const functions cannot have back edges (an
1046 indication of a possible infinite loop side
1047 effect). */
1048 if (mark_dfs_back_edges ())
1050 /* Preheaders are needed for SCEV to work.
1051 Simple latches and recorded exits improve chances that a loop
1052 will be proved to be finite in testcases such as loop-15.c
1053 and loop-24.c. */
1054 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1055 | LOOPS_HAVE_SIMPLE_LATCHES
1056 | LOOPS_HAVE_RECORDED_EXITS);
1057 if (dump_file && (dump_flags & TDF_DETAILS))
1058 flow_loops_dump (dump_file, NULL, 0);
1059 if (mark_irreducible_loops ())
1061 if (dump_file)
1062 fprintf (dump_file, " has irreducible loops\n");
1063 l->looping = true;
1065 else
1067 struct loop *loop;
1068 scev_initialize ();
1069 FOR_EACH_LOOP (loop, 0)
1070 if (!finite_loop_p (loop))
1072 if (dump_file)
1073 fprintf (dump_file, " cannot prove finiteness of "
1074 "loop %i\n", loop->num);
1075 l->looping = true;
1076 break;
1078 scev_finalize ();
1080 loop_optimizer_finalize ();
1084 if (dump_file && (dump_flags & TDF_DETAILS))
1085 fprintf (dump_file, " checking previously known:");
1087 better_state (&l->pure_const_state, &l->looping,
1088 l->state_previously_known,
1089 l->looping_previously_known);
1090 if (TREE_NOTHROW (decl))
1091 l->can_throw = false;
1093 l->malloc_state = STATE_MALLOC_BOTTOM;
1094 if (DECL_IS_MALLOC (decl))
1095 l->malloc_state = STATE_MALLOC;
1096 else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
1097 l->malloc_state = STATE_MALLOC_TOP;
1098 else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
1099 l->malloc_state = STATE_MALLOC;
1101 pop_cfun ();
1102 if (dump_file)
1104 if (l->looping)
1105 fprintf (dump_file, "Function is locally looping.\n");
1106 if (l->can_throw)
1107 fprintf (dump_file, "Function is locally throwing.\n");
1108 if (l->pure_const_state == IPA_CONST)
1109 fprintf (dump_file, "Function is locally const.\n");
1110 if (l->pure_const_state == IPA_PURE)
1111 fprintf (dump_file, "Function is locally pure.\n");
1112 if (l->can_free)
1113 fprintf (dump_file, "Function can locally free.\n");
1114 if (l->malloc_state == STATE_MALLOC)
1115 fprintf (dump_file, "Function is locally malloc.\n");
1117 return l;
1120 void
1121 funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
1123 /* There are some shared nodes, in particular the initializers on
1124 static declarations. We do not need to scan them more than once
1125 since all we would be interested in are the address-of
1126 operations. */
1127 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1129 funct_state_d *a = analyze_function (node, true);
1130 new (state) funct_state_d (*a);
1131 free (a);
1135 /* Called when a new clone is inserted into the callgraph late. */
1137 void
1138 funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *,
1139 funct_state_d *src_data,
1140 funct_state_d *dst_data)
1142 new (dst_data) funct_state_d (*src_data);
1146 void
1147 pass_ipa_pure_const::
1148 register_hooks (void)
1150 if (init_p)
1151 return;
1153 init_p = true;
1155 funct_state_summaries = new funct_state_summary_t (symtab);
1159 /* Analyze each function in the cgraph to see if it is locally PURE or
1160 CONST. */
1162 static void
1163 pure_const_generate_summary (void)
1165 struct cgraph_node *node;
1167 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1168 pass->register_hooks ();
1170 /* Process all of the functions.
1172 We process AVAIL_INTERPOSABLE functions. We cannot use the results
1173 by default, but the info can be used at LTO with -fwhole-program or
1174 when the function got cloned and the clone is AVAILABLE. */
1176 FOR_EACH_DEFINED_FUNCTION (node)
1177 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1179 funct_state_d *a = analyze_function (node, true);
1180 new (funct_state_summaries->get_create (node)) funct_state_d (*a);
1181 free (a);
1186 /* Serialize the ipa info for lto. */
1188 static void
1189 pure_const_write_summary (void)
1191 struct cgraph_node *node;
1192 struct lto_simple_output_block *ob
1193 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1194 unsigned int count = 0;
1195 lto_symtab_encoder_iterator lsei;
1196 lto_symtab_encoder_t encoder;
1198 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1200 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1201 lsei_next_function_in_partition (&lsei))
1203 node = lsei_cgraph_node (lsei);
1204 if (node->definition && funct_state_summaries->exists (node))
1205 count++;
1208 streamer_write_uhwi_stream (ob->main_stream, count);
1210 /* Process all of the functions. */
1211 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1212 lsei_next_function_in_partition (&lsei))
1214 node = lsei_cgraph_node (lsei);
1215 funct_state_d *fs = funct_state_summaries->get (node);
1216 if (node->definition && fs != NULL)
1218 struct bitpack_d bp;
1219 int node_ref;
1220 lto_symtab_encoder_t encoder;
1222 encoder = ob->decl_state->symtab_node_encoder;
1223 node_ref = lto_symtab_encoder_encode (encoder, node);
1224 streamer_write_uhwi_stream (ob->main_stream, node_ref);
1226 /* Note that the reader must unpack these fields in the same
1227 order in which they are packed here. */
1228 bp = bitpack_create (ob->main_stream);
1229 bp_pack_value (&bp, fs->pure_const_state, 2);
1230 bp_pack_value (&bp, fs->state_previously_known, 2);
1231 bp_pack_value (&bp, fs->looping_previously_known, 1);
1232 bp_pack_value (&bp, fs->looping, 1);
1233 bp_pack_value (&bp, fs->can_throw, 1);
1234 bp_pack_value (&bp, fs->can_free, 1);
1235 bp_pack_value (&bp, fs->malloc_state, 2);
1236 streamer_write_bitpack (&bp);
1240 lto_destroy_simple_output_block (ob);
1244 /* Deserialize the ipa info for lto. */
1246 static void
1247 pure_const_read_summary (void)
1249 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1250 struct lto_file_decl_data *file_data;
1251 unsigned int j = 0;
1253 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1254 pass->register_hooks ();
1256 while ((file_data = file_data_vec[j++]))
1258 const char *data;
1259 size_t len;
1260 struct lto_input_block *ib
1261 = lto_create_simple_input_block (file_data,
1262 LTO_section_ipa_pure_const,
1263 &data, &len);
1264 if (ib)
1266 unsigned int i;
1267 unsigned int count = streamer_read_uhwi (ib);
1269 for (i = 0; i < count; i++)
1271 unsigned int index;
1272 struct cgraph_node *node;
1273 struct bitpack_d bp;
1274 funct_state fs;
1275 lto_symtab_encoder_t encoder;
1277 index = streamer_read_uhwi (ib);
1278 encoder = file_data->symtab_node_encoder;
1279 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1280 index));
1282 fs = funct_state_summaries->get_create (node);
1283 /* Note that the fields are unpacked in the same order
1284 in which they were packed by pure_const_write_summary. */
1286 bp = streamer_read_bitpack (ib);
1287 fs->pure_const_state
1288 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1289 fs->state_previously_known
1290 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1291 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1292 fs->looping = bp_unpack_value (&bp, 1);
1293 fs->can_throw = bp_unpack_value (&bp, 1);
1294 fs->can_free = bp_unpack_value (&bp, 1);
1295 fs->malloc_state
1296 = (enum malloc_state_e) bp_unpack_value (&bp, 2);
1298 if (dump_file)
1300 int flags = flags_from_decl_or_type (node->decl);
1301 fprintf (dump_file, "Read info for %s ", node->dump_name ());
1302 if (flags & ECF_CONST)
1303 fprintf (dump_file, " const");
1304 if (flags & ECF_PURE)
1305 fprintf (dump_file, " pure");
1306 if (flags & ECF_NOTHROW)
1307 fprintf (dump_file, " nothrow");
1308 fprintf (dump_file, "\n pure const state: %s\n",
1309 pure_const_names[fs->pure_const_state]);
1310 fprintf (dump_file, " previously known state: %s\n",
1311 pure_const_names[fs->state_previously_known]);
1312 if (fs->looping)
1313 fprintf (dump_file," function is locally looping\n");
1314 if (fs->looping_previously_known)
1315 fprintf (dump_file," function is previously known looping\n");
1316 if (fs->can_throw)
1317 fprintf (dump_file," function is locally throwing\n");
1318 if (fs->can_free)
1319 fprintf (dump_file," function can locally free\n");
1320 fprintf (dump_file, "\n malloc state: %s\n",
1321 malloc_state_names[fs->malloc_state]);
1325 lto_destroy_simple_input_block (file_data,
1326 LTO_section_ipa_pure_const,
1327 ib, data, len);
1332 /* We only propagate across edges that can throw externally and their callee
1333 is not interposable. */
1335 static bool
1336 ignore_edge_for_nothrow (struct cgraph_edge *e)
1338 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1339 return true;
1341 enum availability avail;
1342 cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
1343 e->caller);
1344 if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
1345 return true;
1346 return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1347 && !e->callee->binds_to_current_def_p (e->caller);
1350 /* Return true if NODE is a self-recursive function.
1351 Indirectly recursive functions appear as non-trivial strongly
1352 connected components, so we need to care about self-recursion
1353 only. */
1355 static bool
1356 self_recursive_p (struct cgraph_node *node)
1358 struct cgraph_edge *e;
1359 for (e = node->callees; e; e = e->next_callee)
1360 if (e->callee->function_symbol () == node)
1361 return true;
1362 return false;
1365 /* Return true if N is a cdtor that is not const or pure. In this case we
1366 may need to remove the unreachable function if it is marked const/pure. */
1368 static bool
1369 cdtor_p (cgraph_node *n, void *)
1371 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1372 return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1373 || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1374 return false;
1377 /* We only propagate across edges with non-interposable callee. */
1379 static bool
1380 ignore_edge_for_pure_const (struct cgraph_edge *e)
1382 enum availability avail;
1383 e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1384 return (avail <= AVAIL_INTERPOSABLE);
1388 /* Produce transitive closure over the callgraph and compute pure/const
1389 attributes. */
1391 static bool
1392 propagate_pure_const (void)
1394 struct cgraph_node *node;
1395 struct cgraph_node *w;
1396 struct cgraph_node **order =
1397 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1398 int order_pos;
1399 int i;
1400 struct ipa_dfs_info * w_info;
1401 bool remove_p = false;
1402 bool has_cdtor;
1404 order_pos = ipa_reduced_postorder (order, true, false,
1405 ignore_edge_for_pure_const);
1406 if (dump_file)
1408 cgraph_node::dump_cgraph (dump_file);
1409 ipa_print_order (dump_file, "reduced", order, order_pos);
1412 /* Propagate the local information through the call graph to produce
1413 the global information. All the nodes within a cycle will have
1414 the same info so we collapse cycles first. Then we can do the
1415 propagation in one pass from the leaves to the roots. */
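/* Illustration: in a recursion cycle f -> g -> f where f's body alone
   is IPA_CONST and g's body reads a global (IPA_PURE), both nodes
   collapse to IPA_PURE, and since the cycle visits more than one node
   it is also conservatively marked looping (see count > 1 below).  */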
1416 for (i = 0; i < order_pos; i++ )
1418 enum pure_const_state_e pure_const_state = IPA_CONST;
1419 bool looping = false;
1420 int count = 0;
1421 node = order[i];
1423 if (node->alias)
1424 continue;
1426 if (dump_file && (dump_flags & TDF_DETAILS))
1427 fprintf (dump_file, "Starting cycle\n");
1429 /* Find the worst state for any node in the cycle. */
1430 w = node;
1431 while (w && pure_const_state != IPA_NEITHER)
1433 struct cgraph_edge *e;
1434 struct cgraph_edge *ie;
1435 int i;
1436 struct ipa_ref *ref = NULL;
1438 funct_state w_l = funct_state_summaries->get_create (w);
1439 if (dump_file && (dump_flags & TDF_DETAILS))
1440 fprintf (dump_file, " Visiting %s state:%s looping %i\n",
1441 w->dump_name (),
1442 pure_const_names[w_l->pure_const_state],
1443 w_l->looping);
1445 /* First merge in function body properties.
1446 We are safe to pass NULL as FROM and TO because we will take care
1447 of possible interposition when walking callees. */
1448 worse_state (&pure_const_state, &looping,
1449 w_l->pure_const_state, w_l->looping,
1450 NULL, NULL);
1451 if (pure_const_state == IPA_NEITHER)
1452 break;
1454 count++;
1456 /* We consider recursive cycles as possibly infinite.
1457 This might be relaxed since infinite recursion leads to stack
1458 overflow. */
1459 if (count > 1)
1460 looping = true;
1462 /* Now walk the edges and merge in callee properties. */
1463 for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1464 e = e->next_callee)
1466 enum availability avail;
1467 struct cgraph_node *y = e->callee->
1468 function_or_virtual_thunk_symbol (&avail,
1469 e->caller);
1470 enum pure_const_state_e edge_state = IPA_CONST;
1471 bool edge_looping = false;
1473 if (dump_file && (dump_flags & TDF_DETAILS))
1475 fprintf (dump_file, " Call to %s",
1476 e->callee->dump_name ());
1478 if (avail > AVAIL_INTERPOSABLE)
1480 funct_state y_l = funct_state_summaries->get (y);
1481 if (dump_file && (dump_flags & TDF_DETAILS))
1483 fprintf (dump_file,
1484 " state:%s looping:%i\n",
1485 pure_const_names[y_l->pure_const_state],
1486 y_l->looping);
1488 if (y_l->pure_const_state > IPA_PURE
1489 && e->cannot_lead_to_return_p ())
1491 if (dump_file && (dump_flags & TDF_DETAILS))
1492 fprintf (dump_file,
1493 " Ignoring side effects"
1494 " -> pure, looping\n");
1495 edge_state = IPA_PURE;
1496 edge_looping = true;
1498 else
1500 edge_state = y_l->pure_const_state;
1501 edge_looping = y_l->looping;
1504 else if (special_builtin_state (&edge_state, &edge_looping,
1505 y->decl))
1507 else
1508 state_from_flags (&edge_state, &edge_looping,
1509 flags_from_decl_or_type (y->decl),
1510 e->cannot_lead_to_return_p ());
1512 /* Merge the results with what we already know. */
1513 better_state (&edge_state, &edge_looping,
1514 w_l->state_previously_known,
1515 w_l->looping_previously_known);
1516 worse_state (&pure_const_state, &looping,
1517 edge_state, edge_looping, e->caller, e->callee);
1518 if (pure_const_state == IPA_NEITHER)
1519 break;
1522 /* Now process the indirect calls. */
1523 for (ie = w->indirect_calls;
1524 ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1526 enum pure_const_state_e edge_state = IPA_CONST;
1527 bool edge_looping = false;
1529 if (dump_file && (dump_flags & TDF_DETAILS))
1530 fprintf (dump_file, " Indirect call");
1531 state_from_flags (&edge_state, &edge_looping,
1532 ie->indirect_info->ecf_flags,
1533 ie->cannot_lead_to_return_p ());
1534 /* Merge the results with what we already know. */
1535 better_state (&edge_state, &edge_looping,
1536 w_l->state_previously_known,
1537 w_l->looping_previously_known);
1538 worse_state (&pure_const_state, &looping,
1539 edge_state, edge_looping, NULL, NULL);
1540 if (pure_const_state == IPA_NEITHER)
1541 break;
1544 /* And finally all loads and stores. */
1545 for (i = 0; w->iterate_reference (i, ref)
1546 && pure_const_state != IPA_NEITHER; i++)
1548 enum pure_const_state_e ref_state = IPA_CONST;
1549 bool ref_looping = false;
1550 switch (ref->use)
1552 case IPA_REF_LOAD:
1553 /* readonly reads are safe. */
1554 if (TREE_READONLY (ref->referred->decl))
1555 break;
1556 if (dump_file && (dump_flags & TDF_DETAILS))
1557 fprintf (dump_file, " nonreadonly global var read\n");
1558 ref_state = IPA_PURE;
1559 break;
1560 case IPA_REF_STORE:
1561 if (ref->cannot_lead_to_return ())
1562 break;
1563 ref_state = IPA_NEITHER;
1564 if (dump_file && (dump_flags & TDF_DETAILS))
1565 fprintf (dump_file, " global var write\n");
1566 break;
1567 case IPA_REF_ADDR:
1568 break;
1569 default:
1570 gcc_unreachable ();
1572 better_state (&ref_state, &ref_looping,
1573 w_l->state_previously_known,
1574 w_l->looping_previously_known);
1575 worse_state (&pure_const_state, &looping,
1576 ref_state, ref_looping, NULL, NULL);
1577 if (pure_const_state == IPA_NEITHER)
1578 break;
1580 w_info = (struct ipa_dfs_info *) w->aux;
1581 w = w_info->next_cycle;
1583 if (dump_file && (dump_flags & TDF_DETAILS))
1584 fprintf (dump_file, "Result %s looping %i\n",
1585 pure_const_names [pure_const_state],
1586 looping);
1588 /* Find the worst state of can_free for any node in the cycle. */
1589 bool can_free = false;
1590 w = node;
1591 while (w && !can_free)
1593 struct cgraph_edge *e;
1594 funct_state w_l = funct_state_summaries->get (w);
1596 if (w_l->can_free
1597 || w->get_availability () == AVAIL_INTERPOSABLE
1598 || w->indirect_calls)
1599 can_free = true;
1601 for (e = w->callees; e && !can_free; e = e->next_callee)
1603 enum availability avail;
1604 struct cgraph_node *y = e->callee->
1605 function_or_virtual_thunk_symbol (&avail,
1606 e->caller);
1608 if (avail > AVAIL_INTERPOSABLE)
1609 can_free = funct_state_summaries->get (y)->can_free;
1610 else
1611 can_free = true;
1613 w_info = (struct ipa_dfs_info *) w->aux;
1614 w = w_info->next_cycle;
1617 /* Copy back the region's pure_const_state, which is shared by
1618 all nodes in the region. */
1619 w = node;
1620 while (w)
1622 funct_state w_l = funct_state_summaries->get (w);
1623 enum pure_const_state_e this_state = pure_const_state;
1624 bool this_looping = looping;
1626 w_l->can_free = can_free;
1627 w->nonfreeing_fn = !can_free;
1628 if (!can_free && dump_file)
1629 fprintf (dump_file, "Function found not to call free: %s\n",
1630 w->name ());
1632 if (w_l->state_previously_known != IPA_NEITHER
1633 && this_state > w_l->state_previously_known)
1635 this_state = w_l->state_previously_known;
1636 if (this_state == IPA_NEITHER)
1637 this_looping = w_l->looping_previously_known;
1639 if (!this_looping && self_recursive_p (w))
1640 this_looping = true;
1641 if (!w_l->looping_previously_known)
1642 this_looping = false;
1644 /* All nodes within a cycle share the same info. */
1645 w_l->pure_const_state = this_state;
1646 w_l->looping = this_looping;
1648 /* Inline clones share declaration with their offline copies;
1649 do not modify their declarations since the offline copy may
1650 be different. */
1651 if (!w->global.inlined_to)
1652 switch (this_state)
1654 case IPA_CONST:
1655 if (!TREE_READONLY (w->decl))
1657 warn_function_const (w->decl, !this_looping);
1658 if (dump_file)
1659 fprintf (dump_file, "Function found to be %sconst: %s\n",
1660 this_looping ? "looping " : "",
1661 w->name ());
1663 /* Turning constructor or destructor to non-looping const/pure
1664 enables us to possibly remove the function completely. */
1665 if (this_looping)
1666 has_cdtor = false;
1667 else
1668 has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
1669 NULL, true);
1670 if (w->set_const_flag (true, this_looping))
1672 if (dump_file)
1673 fprintf (dump_file,
1674 "Declaration updated to be %sconst: %s\n",
1675 this_looping ? "looping " : "",
1676 w->name ());
1677 remove_p |= has_cdtor;
1679 break;
1681 case IPA_PURE:
1682 if (!DECL_PURE_P (w->decl))
1684 warn_function_pure (w->decl, !this_looping);
1685 if (dump_file)
1686 fprintf (dump_file, "Function found to be %spure: %s\n",
1687 this_looping ? "looping " : "",
1688 w->name ());
1690 if (this_looping)
1691 has_cdtor = false;
1692 else
1693 has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
1694 NULL, true);
1695 if (w->set_pure_flag (true, this_looping))
1697 if (dump_file)
1698 fprintf (dump_file,
1699 "Declaration updated to be %spure: %s\n",
1700 this_looping ? "looping " : "",
1701 w->name ());
1702 remove_p |= has_cdtor;
1704 break;
1706 default:
1707 break;
1709 w_info = (struct ipa_dfs_info *) w->aux;
1710 w = w_info->next_cycle;
1714 ipa_free_postorder_info ();
1715 free (order);
1716 return remove_p;
1719 /* Produce transitive closure over the callgraph and compute nothrow
1720 attributes. */
1722 static void
1723 propagate_nothrow (void)
1725 struct cgraph_node *node;
1726 struct cgraph_node *w;
1727 struct cgraph_node **order =
1728 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1729 int order_pos;
1730 int i;
1731 struct ipa_dfs_info * w_info;
1733 order_pos = ipa_reduced_postorder (order, true, false,
1734 ignore_edge_for_nothrow);
1735 if (dump_file)
1737 cgraph_node::dump_cgraph (dump_file);
1738 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1741 /* Propagate the local information through the call graph to produce
1742 the global information. All the nodes within a cycle will have
1743 the same info so we collapse cycles first. Then we can do the
1744 propagation in one pass from the leaves to the roots. */
1745 for (i = 0; i < order_pos; i++ )
1747 bool can_throw = false;
1748 node = order[i];
1750 if (node->alias)
1751 continue;
1753 /* Find the worst state for any node in the cycle. */
1754 w = node;
1755 while (w && !can_throw)
1757 struct cgraph_edge *e, *ie;
1759 if (!TREE_NOTHROW (w->decl))
1761 funct_state w_l = funct_state_summaries->get_create (w);
1763 if (w_l->can_throw
1764 || w->get_availability () == AVAIL_INTERPOSABLE)
1765 can_throw = true;
1767 for (e = w->callees; e && !can_throw; e = e->next_callee)
1769 enum availability avail;
1771 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1772 continue;
1774 struct cgraph_node *y = e->callee->
1775 function_or_virtual_thunk_symbol (&avail,
1776 e->caller);
1778 /* We can use info about the callee only if we know it
1779 cannot be interposed.
1780 When the callee is compiled with non-call exceptions we also
1781 must check that the declaration is bound to the current
1782 body, as another semantically equivalent body may still
1783 throw. */
1784 if (avail <= AVAIL_INTERPOSABLE
1785 || (!TREE_NOTHROW (y->decl)
1786 && (funct_state_summaries->get_create (y)->can_throw
1787 || (opt_for_fn (y->decl, flag_non_call_exceptions)
1788 && !e->callee->binds_to_current_def_p (w)))))
1789 can_throw = true;
1791 for (ie = w->indirect_calls; ie && !can_throw;
1792 ie = ie->next_callee)
1793 if (ie->can_throw_external
1794 && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1795 can_throw = true;
1797 w_info = (struct ipa_dfs_info *) w->aux;
1798 w = w_info->next_cycle;
1801 /* Copy back the region's nothrow state, which is shared by
1802 all nodes in the region. */
1803 w = node;
1804 while (w)
1806 funct_state w_l = funct_state_summaries->get_create (w);
1807 if (!can_throw && !TREE_NOTHROW (w->decl))
1809 /* Inline clones share declaration with their offline copies;
1810 do not modify their declarations since the offline copy may
1811 be different. */
1812 if (!w->global.inlined_to)
1814 w->set_nothrow_flag (true);
1815 if (dump_file)
1816 fprintf (dump_file, "Function found to be nothrow: %s\n",
1817 w->name ());
1820 else if (can_throw && !TREE_NOTHROW (w->decl))
1821 w_l->can_throw = true;
1822 w_info = (struct ipa_dfs_info *) w->aux;
1823 w = w_info->next_cycle;
1827 ipa_free_postorder_info ();
1828 free (order);
1831 /* Debugging function to dump state of malloc lattice. */
1833 DEBUG_FUNCTION
1834 static void
1835 dump_malloc_lattice (FILE *dump_file, const char *s)
1837 if (!dump_file)
1838 return;
1840 fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1841 cgraph_node *node;
1842 FOR_EACH_FUNCTION (node)
1844 funct_state fs = funct_state_summaries->get_create (node);
1845 malloc_state_e state = fs->malloc_state;
1846 fprintf (dump_file, "%s: %s\n", node->name (), malloc_state_names[state]);
1850 /* Propagate malloc attribute across the callgraph. */
1852 static void
1853 propagate_malloc (void)
1855 cgraph_node *node;
1856 FOR_EACH_FUNCTION (node)
1858 if (DECL_IS_MALLOC (node->decl))
1859 if (!funct_state_summaries->exists (node))
1861 funct_state fs = funct_state_summaries->get_create (node);
1862 fs->malloc_state = STATE_MALLOC;
1866 dump_malloc_lattice (dump_file, "Initial");
1867 struct cgraph_node **order
1868 = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1869 int order_pos = ipa_reverse_postorder (order);
1870 bool changed = true;
1872 while (changed)
1874 changed = false;
1875 /* Walk in postorder. */
1876 for (int i = order_pos - 1; i >= 0; --i)
1878 cgraph_node *node = order[i];
1879 if (node->alias
1880 || !node->definition
1881 || !funct_state_summaries->exists (node))
1882 continue;
1884 funct_state l = funct_state_summaries->get_create (node);
1886 /* FIXME: add support for indirect-calls. */
1887 if (node->indirect_calls)
1889 l->malloc_state = STATE_MALLOC_BOTTOM;
1890 continue;
1893 if (node->get_availability () <= AVAIL_INTERPOSABLE)
1895 l->malloc_state = STATE_MALLOC_BOTTOM;
1896 continue;
1899 if (l->malloc_state == STATE_MALLOC_BOTTOM)
1900 continue;
1902 vec<cgraph_node *> callees = vNULL;
1903 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
1905 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
1906 if (es && es->is_return_callee_uncaptured)
1907 callees.safe_push (cs->callee);
1910 malloc_state_e new_state = l->malloc_state;
1911 for (unsigned j = 0; j < callees.length (); j++)
1913 cgraph_node *callee = callees[j];
1914 if (!funct_state_summaries->exists (callee))
1916 new_state = STATE_MALLOC_BOTTOM;
1917 break;
1919 malloc_state_e callee_state
1920 = funct_state_summaries->get_create (callee)->malloc_state;
1921 if (new_state < callee_state)
1922 new_state = callee_state;
1924 if (new_state != l->malloc_state)
1926 changed = true;
1927 l->malloc_state = new_state;
1932 FOR_EACH_DEFINED_FUNCTION (node)
1933 if (funct_state_summaries->exists (node))
1935 funct_state l = funct_state_summaries->get_create (node);
1936 if (!node->alias
1937 && l->malloc_state == STATE_MALLOC
1938 && !node->global.inlined_to)
1940 if (dump_file && (dump_flags & TDF_DETAILS))
1941 fprintf (dump_file, "Function %s found to be malloc\n",
1942 node->name ());
1944 bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
1945 node->set_malloc_flag (true);
1946 if (!malloc_decl_p && warn_suggest_attribute_malloc)
1947 warn_function_malloc (node->decl);
1951 dump_malloc_lattice (dump_file, "after propagation");
1952 ipa_free_postorder_info ();
1953 free (order);
1956 /* Produce the global information by performing a transitive closure
1957 on the local information that was produced by generate_summary. */
1959 unsigned int
1960 pass_ipa_pure_const::
1961 execute (function *)
1963 bool remove_p;
1965 /* Nothrow propagation makes more functions not lead to a return,
1966 which improves the later analyses. */
1967 propagate_nothrow ();
1968 propagate_malloc ();
1969 remove_p = propagate_pure_const ();
1971 delete funct_state_summaries;
1972 return remove_p ? TODO_remove_functions : 0;
static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}
pass_ipa_pure_const::pass_ipa_pure_const (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_pure_const, ctxt,
		      pure_const_generate_summary, /* generate_summary */
		      pure_const_write_summary, /* write_summary */
		      pure_const_read_summary, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL), /* variable_transform */
  init_p (false) {}
ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}
/* Return true if function should be skipped for local pure const analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over the whole program after
     early optimizations we must not promote functions that are called by
     already processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable and
     do not have any non-interposable aliases.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !node->has_aliases_p ())
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}
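/* Illustrative example (hypothetical function, not from this file): a
   non-static function such as

     int widget_count (void) { return n_widgets; }

   built into a shared library may be interposed by another definition at
   dynamic link time, so facts proved about this body need not hold at
   call sites; without a non-interposable alias the analysis result could
   never be used safely, so we skip the work.  */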
/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */
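/* As an illustration (hypothetical functions, not part of this file):
   the pass below would classify

     int sq (int x) { return x * x; }

   as const (the result depends only on the arguments, with no memory
   access), and

     extern int counter;
     int get_counter (void) { return counter; }

   as pure (it reads global memory but writes nothing and has no other
   side effects).  */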
namespace {

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
} // anon namespace

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}
/* Emit noreturn warnings.  */
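/* For illustration (hypothetical function, not from this file): a
   function whose control flow never reaches the exit block, e.g.

     void
     fatal (const char *msg)
     {
       fprintf (stderr, "%s\n", msg);
       exit (1);
     }

   has EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0 and so is a
   candidate for the -Wsuggest-attribute=noreturn warning below.  */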
namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn
} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}
/* Simple local pass for nothrow discovery.  Unlike the pure/const pass
   above it does not reuse analyze_function; it scans the body directly
   for statements that can throw externally.  It is effective when
   executed together with other optimization passes in the early
   optimization pass queue.  */
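/* A hedged sketch (hypothetical functions, not from this file): given

     extern int f (int) __attribute__ ((nothrow));
     int g (int x) { return f (x); }

   no statement in g can throw externally, so the pass below can set
   TREE_NOTHROW on g; callers of g may then drop their EH cleanups.  */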
namespace {

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow
unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering; we can not really use availability yet.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
	fprintf (dump_file, "Function is interposable;"
		 " not analyzing.\n");
      return 0;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
        if (stmt_can_throw_external (gsi_stmt (gsi)))
	  {
	    /* Skip self-recursive calls; their EH edges are purged below
	       once the function is known to be nothrow.  */
	    if (is_gimple_call (gsi_stmt (gsi)))
	      {
		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
		if (callee_t && recursive_call_p (current_function_decl,
						  callee_t))
		  continue;
	      }

	    if (dump_file)
	      {
		fprintf (dump_file, "Statement can throw: ");
		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
	      }
	    return 0;
	  }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gimple *g = last_stmt (this_block))
	if (is_gimple_call (g))
	  {
	    tree callee_t = gimple_call_fndecl (g);
	    if (callee_t
		&& recursive_call_p (current_function_decl, callee_t)
		&& maybe_clean_eh_stmt (g)
		&& gimple_purge_dead_eh_edges (this_block))
	      cfg_changed = true;
	  }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
	     current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}
} // anon namespace

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}