gcc/ipa-pure-const.c
1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
25    This must be run after inlining decisions have been made, since
26    otherwise the local sets will not contain information that is
27    consistent with the post-inlined state.  The global sets are not prone
28    to this problem since they are by definition transitive.  */
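// Editor's illustration (not part of the original sources): at the source
// level, TREE_READONLY and DECL_PURE_P correspond to the function
// attributes a user could write by hand:
//
//   int square (int x) __attribute__ ((const));          // depends on args only
//   int count_a (const char *s) __attribute__ ((pure));  // may read, never writes
//
// This pass infers the same facts automatically.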
30 /* The code in this module is called by the ipa pass manager. It
31    should be one of the later passes, since its information is used by
32    the rest of the compilation.  */
34 #include "config.h"
35 #include "system.h"
36 #include "coretypes.h"
37 #include "backend.h"
38 #include "target.h"
39 #include "tree.h"
40 #include "gimple.h"
41 #include "tree-pass.h"
42 #include "tree-streamer.h"
43 #include "cgraph.h"
44 #include "diagnostic.h"
45 #include "calls.h"
46 #include "cfganal.h"
47 #include "tree-eh.h"
48 #include "gimple-iterator.h"
49 #include "gimple-walk.h"
50 #include "tree-cfg.h"
51 #include "tree-ssa-loop-niter.h"
52 #include "langhooks.h"
53 #include "ipa-utils.h"
54 #include "gimple-pretty-print.h"
55 #include "cfgloop.h"
56 #include "tree-scalar-evolution.h"
57 #include "intl.h"
58 #include "opts.h"
59 #include "ssa.h"
60 #include "alloc-pool.h"
61 #include "symbol-summary.h"
62 #include "ipa-prop.h"
63 #include "ipa-fnsummary.h"
65 /* Lattice values for const and pure functions. Everything starts out
66 being const, then may drop to pure and then neither depending on
67 what is found. */
68 enum pure_const_state_e
70 IPA_CONST,
71 IPA_PURE,
72 IPA_NEITHER
75 static const char *pure_const_names[3] = {"const", "pure", "neither"};
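// Editor's example of the lattice (illustrative only, not from the sources):
//
//   static int g;
//   int f1 (int x) { return x + 1; }   // IPA_CONST: touches no memory
//   int f2 (void)  { return g; }       // IPA_PURE: reads non-readonly memory
//   void f3 (int x) { g = x; }         // IPA_NEITHER: writes global memory
//
// The state only ever moves downward: IPA_CONST -> IPA_PURE -> IPA_NEITHER.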
77 enum malloc_state_e
79 STATE_MALLOC_TOP,
80 STATE_MALLOC,
81 STATE_MALLOC_BOTTOM
84 static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
86 /* Holder for the const_state. There is one of these per function
87 decl. */
88 class funct_state_d
90 public:
91 funct_state_d (): pure_const_state (IPA_NEITHER),
92 state_previously_known (IPA_NEITHER), looping_previously_known (true),
93 looping (true), can_throw (true), can_free (true),
94 malloc_state (STATE_MALLOC_BOTTOM) {}
96 funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
97 state_previously_known (s.state_previously_known),
98 looping_previously_known (s.looping_previously_known),
99 looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
100 malloc_state (s.malloc_state) {}
102 /* See above. */
103 enum pure_const_state_e pure_const_state;
104   /* What the user set here; we can always be sure about this.  */
105 enum pure_const_state_e state_previously_known;
106 bool looping_previously_known;
108   /* True if the function could possibly loop forever.  There are a
109      lot of ways that this could be determined.  We are pretty
110      conservative here.  While it is possible to CSE pure and const
111      calls, it is not legal to have DCE get rid of the call if there
112      is a possibility that the call could loop forever, since this is
113      a behavioral change.  */
114 bool looping;
116 bool can_throw;
118 /* If function can call free, munmap or otherwise make previously
119 non-trapping memory accesses trapping. */
120 bool can_free;
122 enum malloc_state_e malloc_state;
125 typedef struct funct_state_d * funct_state;
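// Editor's note on LOOPING (illustrative): two calls to a const function
// may be CSEd,
//
//   int a = f (x);
//   int b = f (x);   // B can reuse A's value
//
// but if F might loop forever, a call whose result is unused must not be
// removed by DCE: deleting it could turn a non-terminating program into a
// terminating one, which is a behavioral change.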
127 /* The storage of the funct_state is abstracted because there is the
128 possibility that it may be desirable to move this to the cgraph
129 local info. */
131 class funct_state_summary_t:
132 public fast_function_summary <funct_state_d *, va_heap>
134 public:
135 funct_state_summary_t (symbol_table *symtab):
136 fast_function_summary <funct_state_d *, va_heap> (symtab) {}
138 virtual void insert (cgraph_node *, funct_state_d *state);
139 virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
140 funct_state_d *src_data,
141 funct_state_d *dst_data);
144 static funct_state_summary_t *funct_state_summaries = NULL;
146 static bool gate_pure_const (void);
148 namespace {
150 const pass_data pass_data_ipa_pure_const =
152 IPA_PASS, /* type */
153 "pure-const", /* name */
154 OPTGROUP_NONE, /* optinfo_flags */
155 TV_IPA_PURE_CONST, /* tv_id */
156 0, /* properties_required */
157 0, /* properties_provided */
158 0, /* properties_destroyed */
159 0, /* todo_flags_start */
160 0, /* todo_flags_finish */
163 class pass_ipa_pure_const : public ipa_opt_pass_d
165 public:
166 pass_ipa_pure_const(gcc::context *ctxt);
168 /* opt_pass methods: */
169 bool gate (function *) { return gate_pure_const (); }
170 unsigned int execute (function *fun);
172 void register_hooks (void);
174 private:
175 bool init_p;
176 }; // class pass_ipa_pure_const
178 } // anon namespace
180 /* Try to guess if function body will always be visible to compiler
181 when compiling the call and whether compiler will be able
182 to propagate the information by itself. */
184 static bool
185 function_always_visible_to_compiler_p (tree decl)
187 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
188 || DECL_COMDAT (decl));
191 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
192 is true if the function is known to be finite. The diagnostic is
193    controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique to
194    OPTION; this function may initialize it, and it is always returned
195    by the function.  */
197 static hash_set<tree> *
198 suggest_attribute (int option, tree decl, bool known_finite,
199 hash_set<tree> *warned_about,
200 const char * attrib_name)
202 if (!option_enabled (option, &global_options))
203 return warned_about;
204 if (TREE_THIS_VOLATILE (decl)
205 || (known_finite && function_always_visible_to_compiler_p (decl)))
206 return warned_about;
208 if (!warned_about)
209 warned_about = new hash_set<tree>;
210 if (warned_about->contains (decl))
211 return warned_about;
212 warned_about->add (decl);
213 warning_at (DECL_SOURCE_LOCATION (decl),
214 option,
215 known_finite
216 ? G_("function might be candidate for attribute %qs")
217 : G_("function might be candidate for attribute %qs"
218 " if it is known to return normally"), attrib_name);
219 return warned_about;
222 /* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
223 is true if the function is known to be finite. */
225 static void
226 warn_function_pure (tree decl, bool known_finite)
228 /* Declaring a void function pure makes no sense and is diagnosed
229 by -Wattributes because calling it would have no effect. */
230 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
231 return;
233 static hash_set<tree> *warned_about;
234 warned_about
235 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
236 known_finite, warned_about, "pure");
239 /* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
240 is true if the function is known to be finite. */
242 static void
243 warn_function_const (tree decl, bool known_finite)
245   /* Declaring a void function const makes no sense and is diagnosed
246 by -Wattributes because calling it would have no effect. */
247 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
248 return;
250 static hash_set<tree> *warned_about;
251 warned_about
252 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
253 known_finite, warned_about, "const");
256 /* Emit suggestion about __attribute__((malloc)) for DECL. */
258 static void
259 warn_function_malloc (tree decl)
261 static hash_set<tree> *warned_about;
262 warned_about
263 = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
264 true, warned_about, "malloc");
267 /* Emit suggestion about __attribute__((noreturn)) for DECL. */
269 static void
270 warn_function_noreturn (tree decl)
272 tree original_decl = decl;
274 static hash_set<tree> *warned_about;
275 if (!lang_hooks.missing_noreturn_ok_p (decl)
276 && targetm.warn_func_return (decl))
277 warned_about
278 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
279 true, warned_about, "noreturn");
282 void
283 warn_function_cold (tree decl)
285 tree original_decl = decl;
287 static hash_set<tree> *warned_about;
288 warned_about
289 = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
290 true, warned_about, "cold");
293 /* Check to see if the use (or definition when CHECKING_WRITE is true)
294 variable T is legal in a function that is either pure or const. */
296 static inline void
297 check_decl (funct_state local,
298 tree t, bool checking_write, bool ipa)
300 /* Do not want to do anything with volatile except mark any
301 function that uses one to be not const or pure. */
302 if (TREE_THIS_VOLATILE (t))
304 local->pure_const_state = IPA_NEITHER;
305 if (dump_file)
306 fprintf (dump_file, " Volatile operand is not const/pure\n");
307 return;
310 /* Do not care about a local automatic that is not static. */
311 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
312 return;
314   /* If the variable has the "used" attribute, treat it as if it had
315 been touched by the devil. */
316 if (DECL_PRESERVE_P (t))
318 local->pure_const_state = IPA_NEITHER;
319 if (dump_file)
320 fprintf (dump_file, " Used static/global variable is not const/pure\n");
321 return;
324 /* In IPA mode we are not interested in checking actual loads and stores;
325 they will be processed at propagation time using ipa_ref. */
326 if (ipa)
327 return;
329 /* Since we have dealt with the locals and params cases above, if we
330 are CHECKING_WRITE, this cannot be a pure or constant
331 function. */
332 if (checking_write)
334 local->pure_const_state = IPA_NEITHER;
335 if (dump_file)
336 fprintf (dump_file, " static/global memory write is not const/pure\n");
337 return;
340 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
342 /* Readonly reads are safe. */
343 if (TREE_READONLY (t))
344 return; /* Read of a constant, do not change the function state. */
345 else
347 if (dump_file)
348 fprintf (dump_file, " global memory read is not const\n");
349 /* Just a regular read. */
350 if (local->pure_const_state == IPA_CONST)
351 local->pure_const_state = IPA_PURE;
354 else
356 /* Compilation level statics can be read if they are readonly
357 variables. */
358 if (TREE_READONLY (t))
359 return;
361 if (dump_file)
362 fprintf (dump_file, " static memory read is not const\n");
363 /* Just a regular read. */
364 if (local->pure_const_state == IPA_CONST)
365 local->pure_const_state = IPA_PURE;
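// Editor's example for check_decl (assumed declarations, not from the
// sources):
//
//   volatile int v;           // any use forces IPA_NEITHER
//   int g;                    // a read drops IPA_CONST to IPA_PURE
//   static const int c = 1;   // a read leaves the state unchanged
//
// Any write to a static/global makes the function IPA_NEITHER.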
370 /* Check to see if the use (or definition when CHECKING_WRITE is true)
371 variable T is legal in a function that is either pure or const. */
373 static inline void
374 check_op (funct_state local, tree t, bool checking_write)
376 t = get_base_address (t);
377 if (t && TREE_THIS_VOLATILE (t))
379 local->pure_const_state = IPA_NEITHER;
380 if (dump_file)
381 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
382 return;
384 else if (t
385 && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
386 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
387 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
389 if (dump_file)
390 fprintf (dump_file, " Indirect ref to local memory is OK\n");
391 return;
393 else if (checking_write)
395 local->pure_const_state = IPA_NEITHER;
396 if (dump_file)
397 fprintf (dump_file, " Indirect ref write is not const/pure\n");
398 return;
400 else
402 if (dump_file)
403 fprintf (dump_file, " Indirect ref read is not const\n");
404 if (local->pure_const_state == IPA_CONST)
405 local->pure_const_state = IPA_PURE;
409 /* Compute the state based on ECF FLAGS and store it to STATE and LOOPING.  */
411 static void
412 state_from_flags (enum pure_const_state_e *state, bool *looping,
413 int flags, bool cannot_lead_to_return)
415 *looping = false;
416 if (flags & ECF_LOOPING_CONST_OR_PURE)
418 *looping = true;
419 if (dump_file && (dump_flags & TDF_DETAILS))
420 fprintf (dump_file, " looping\n");
422 if (flags & ECF_CONST)
424 *state = IPA_CONST;
425 if (dump_file && (dump_flags & TDF_DETAILS))
426 fprintf (dump_file, " const\n");
428 else if (flags & ECF_PURE)
430 *state = IPA_PURE;
431 if (dump_file && (dump_flags & TDF_DETAILS))
432 fprintf (dump_file, " pure\n");
434 else if (cannot_lead_to_return)
436 *state = IPA_PURE;
437 *looping = true;
438 if (dump_file && (dump_flags & TDF_DETAILS))
439 fprintf (dump_file, " ignoring side effects->pure looping\n");
441 else
443 if (dump_file && (dump_flags & TDF_DETAILS))
444 fprintf (dump_file, " neither\n");
445 *state = IPA_NEITHER;
446 *looping = true;
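// Editor's example of state_from_flags (illustrative): flags
// ECF_CONST | ECF_LOOPING_CONST_OR_PURE yield *STATE == IPA_CONST with
// *LOOPING == true, while flags with no const/pure bits on a call that
// may still return yield IPA_NEITHER with *LOOPING == true.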
450 /* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
451    better of the two variants into STATE and LOOPING.
452    Be sure to merge looping correctly.  IPA_NEITHER functions
453    have looping 0 even if they don't have to return.  */
455 static inline void
456 better_state (enum pure_const_state_e *state, bool *looping,
457 enum pure_const_state_e state2, bool looping2)
459 if (state2 < *state)
461 if (*state == IPA_NEITHER)
462 *looping = looping2;
463 else
464 *looping = MIN (*looping, looping2);
465 *state = state2;
467 else if (state2 != IPA_NEITHER)
468 *looping = MIN (*looping, looping2);
471 /* Merge STATE with STATE2 and LOOPING with LOOPING2, storing the
472    worse of the two variants into STATE and LOOPING.
473    FROM and TO are the caller and the callee of the call being considered.  */
475 static inline void
476 worse_state (enum pure_const_state_e *state, bool *looping,
477 enum pure_const_state_e state2, bool looping2,
478 struct symtab_node *from,
479 struct symtab_node *to)
481 /* Consider function:
483 bool a(int *p)
485 return *p==*p;
488 During early optimization we will turn this into:
490 bool a(int *p)
492 return true;
495    Now this function will be detected as CONST; however, when interposed
496    it may end up being just pure.  We must always assume the worst scenario here.
498 if (*state == IPA_CONST && state2 == IPA_CONST
499 && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
501 if (dump_file && (dump_flags & TDF_DETAILS))
502 fprintf (dump_file, "Dropping state to PURE because call to %s may not "
503 "bind to current def.\n", to->name ());
504 state2 = IPA_PURE;
506 *state = MAX (*state, state2);
507 *looping = MAX (*looping, looping2);
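// Editor's worked example (illustrative), using the ordering
// IPA_CONST < IPA_PURE < IPA_NEITHER:
//
//   better_state: (IPA_NEITHER, looping=1) vs (IPA_PURE, looping=0)
//                 -> (IPA_PURE, looping=0)
//   worse_state:  (IPA_CONST, looping=0) vs (IPA_PURE, looping=1)
//                 -> (IPA_PURE, looping=1)
//
// In addition, worse_state demotes CONST to PURE when the callee may be
// interposed, as with the function A above.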
510 /* Recognize special cases of builtins that are by themselves not pure or const
511    but a function using them is.  */
512 static bool
513 special_builtin_state (enum pure_const_state_e *state, bool *looping,
514 tree callee)
516 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
517 switch (DECL_FUNCTION_CODE (callee))
519 case BUILT_IN_RETURN:
520 case BUILT_IN_UNREACHABLE:
521 CASE_BUILT_IN_ALLOCA:
522 case BUILT_IN_STACK_SAVE:
523 case BUILT_IN_STACK_RESTORE:
524 case BUILT_IN_EH_POINTER:
525 case BUILT_IN_EH_FILTER:
526 case BUILT_IN_UNWIND_RESUME:
527 case BUILT_IN_CXA_END_CLEANUP:
528 case BUILT_IN_EH_COPY_VALUES:
529 case BUILT_IN_FRAME_ADDRESS:
530 case BUILT_IN_APPLY:
531 case BUILT_IN_APPLY_ARGS:
532 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
533 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
534 *looping = false;
535 *state = IPA_CONST;
536 return true;
537 case BUILT_IN_PREFETCH:
538 *looping = true;
539 *state = IPA_CONST;
540 return true;
541 default:
542 break;
544 return false;
547 /* Check the parameters of the function call CALL to see if
548    there are any references in the parameters that are not allowed for
549    pure or const functions.  Also check to see if this is either an
550    indirect call, a call outside the compilation unit, or a call with
551    special attributes that may also affect the purity.  CALL is the
552    call statement for the entire call expression.  */
554 static void
555 check_call (funct_state local, gcall *call, bool ipa)
557 int flags = gimple_call_flags (call);
558 tree callee_t = gimple_call_fndecl (call);
559 bool possibly_throws = stmt_could_throw_p (cfun, call);
560 bool possibly_throws_externally = (possibly_throws
561 && stmt_can_throw_external (cfun, call));
563 if (possibly_throws)
565 unsigned int i;
566 for (i = 0; i < gimple_num_ops (call); i++)
567 if (gimple_op (call, i)
568 && tree_could_throw_p (gimple_op (call, i)))
570 if (possibly_throws && cfun->can_throw_non_call_exceptions)
572 if (dump_file)
573 fprintf (dump_file, " operand can throw; looping\n");
574 local->looping = true;
576 if (possibly_throws_externally)
578 if (dump_file)
579 fprintf (dump_file, " operand can throw externally\n");
580 local->can_throw = true;
585 /* The const and pure flags are set by a variety of places in the
586 compiler (including here). If someone has already set the flags
587 for the callee, (such as for some of the builtins) we will use
588 them, otherwise we will compute our own information.
590 Const and pure functions have less clobber effects than other
591 functions so we process these first. Otherwise if it is a call
592 outside the compilation unit or an indirect call we punt. This
593 leaves local calls which will be processed by following the call
594 graph. */
595 if (callee_t)
597 enum pure_const_state_e call_state;
598 bool call_looping;
600 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
601 && !nonfreeing_call_p (call))
602 local->can_free = true;
604 if (special_builtin_state (&call_state, &call_looping, callee_t))
606 worse_state (&local->pure_const_state, &local->looping,
607 call_state, call_looping,
608 NULL, NULL);
609 return;
611 /* When bad things happen to bad functions, they cannot be const
612 or pure. */
613 if (setjmp_call_p (callee_t))
615 if (dump_file)
616 fprintf (dump_file, " setjmp is not const/pure\n");
617 local->looping = true;
618 local->pure_const_state = IPA_NEITHER;
621 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
622 switch (DECL_FUNCTION_CODE (callee_t))
624 case BUILT_IN_LONGJMP:
625 case BUILT_IN_NONLOCAL_GOTO:
626 if (dump_file)
627 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
628 local->pure_const_state = IPA_NEITHER;
629 local->looping = true;
630 break;
631 default:
632 break;
635 else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
636 local->can_free = true;
638 /* When not in IPA mode, we can still handle self recursion. */
639 if (!ipa && callee_t
640 && recursive_call_p (current_function_decl, callee_t))
642 if (dump_file)
643 fprintf (dump_file, " Recursive call can loop.\n");
644 local->looping = true;
646 /* Either callee is unknown or we are doing local analysis.
647 Look to see if there are any bits available for the callee (such as by
648      declaration or because it is a builtin) and process solely on the basis of
649 those bits. Handle internal calls always, those calls don't have
650 corresponding cgraph edges and thus aren't processed during
651 the propagation. */
652 else if (!ipa || gimple_call_internal_p (call))
654 enum pure_const_state_e call_state;
655 bool call_looping;
656 if (possibly_throws && cfun->can_throw_non_call_exceptions)
658 if (dump_file)
659 fprintf (dump_file, " can throw; looping\n");
660 local->looping = true;
662 if (possibly_throws_externally)
664 if (dump_file)
666 fprintf (dump_file, " can throw externally to lp %i\n",
667 lookup_stmt_eh_lp (call));
668 if (callee_t)
669 fprintf (dump_file, " callee:%s\n",
670 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
672 local->can_throw = true;
674 if (dump_file && (dump_flags & TDF_DETAILS))
675 fprintf (dump_file, " checking flags for call:");
676 state_from_flags (&call_state, &call_looping, flags,
677 ((flags & (ECF_NORETURN | ECF_NOTHROW))
678 == (ECF_NORETURN | ECF_NOTHROW))
679 || (!flag_exceptions && (flags & ECF_NORETURN)));
680 worse_state (&local->pure_const_state, &local->looping,
681 call_state, call_looping, NULL, NULL);
683   /* Direct function calls are handled by IPA propagation.  */
686 /* Wrapper around check_decl for loads in local mode.  */
688 static bool
689 check_load (gimple *, tree op, tree, void *data)
691 if (DECL_P (op))
692 check_decl ((funct_state)data, op, false, false);
693 else
694 check_op ((funct_state)data, op, false);
695 return false;
698 /* Wrapper around check_decl for stores in local mode.  */
700 static bool
701 check_store (gimple *, tree op, tree, void *data)
703 if (DECL_P (op))
704 check_decl ((funct_state)data, op, true, false);
705 else
706 check_op ((funct_state)data, op, true);
707 return false;
710 /* Wrapper around check_decl for loads in ipa mode. */
712 static bool
713 check_ipa_load (gimple *, tree op, tree, void *data)
715 if (DECL_P (op))
716 check_decl ((funct_state)data, op, false, true);
717 else
718 check_op ((funct_state)data, op, false);
719 return false;
722 /* Wrapper around check_decl for stores in ipa mode. */
724 static bool
725 check_ipa_store (gimple *, tree op, tree, void *data)
727 if (DECL_P (op))
728 check_decl ((funct_state)data, op, true, true);
729 else
730 check_op ((funct_state)data, op, true);
731 return false;
734 /* Look at the statement pointed to by GSIP and figure out what interesting side
735    effects it has.  */
736 static void
737 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
739 gimple *stmt = gsi_stmt (*gsip);
741 if (is_gimple_debug (stmt))
742 return;
744   /* Do consider clobbers as side effects before IPA, so that we would rather
745      inline C++ destructors and keep clobber semantics than eliminate them.
747      TODO: We may get smarter during early optimizations on these and let
748      functions containing only clobbers be optimized more.  This is a common
749      case of C++ destructors.  */
751 if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
752 return;
754 if (dump_file)
756 fprintf (dump_file, " scanning: ");
757 print_gimple_stmt (dump_file, stmt, 0);
760 if (gimple_has_volatile_ops (stmt)
761 && !gimple_clobber_p (stmt))
763 local->pure_const_state = IPA_NEITHER;
764 if (dump_file)
765 fprintf (dump_file, " Volatile stmt is not const/pure\n");
768 /* Look for loads and stores. */
769 walk_stmt_load_store_ops (stmt, local,
770 ipa ? check_ipa_load : check_load,
771 ipa ? check_ipa_store : check_store);
773 if (gimple_code (stmt) != GIMPLE_CALL
774 && stmt_could_throw_p (cfun, stmt))
776 if (cfun->can_throw_non_call_exceptions)
778 if (dump_file)
779 fprintf (dump_file, " can throw; looping\n");
780 local->looping = true;
782 if (stmt_can_throw_external (cfun, stmt))
784 if (dump_file)
785 fprintf (dump_file, " can throw externally\n");
786 local->can_throw = true;
788 else
789 if (dump_file)
790 fprintf (dump_file, " can throw\n");
792 switch (gimple_code (stmt))
794 case GIMPLE_CALL:
795 check_call (local, as_a <gcall *> (stmt), ipa);
796 break;
797 case GIMPLE_LABEL:
798 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
799 /* Target of long jump. */
801 if (dump_file)
802 fprintf (dump_file, " nonlocal label is not const/pure\n");
803 local->pure_const_state = IPA_NEITHER;
805 break;
806 case GIMPLE_ASM:
807 if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
809 if (dump_file)
810 fprintf (dump_file, " memory asm clobber is not const/pure\n");
811 /* Abandon all hope, ye who enter here. */
812 local->pure_const_state = IPA_NEITHER;
813 local->can_free = true;
815 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
817 if (dump_file)
818 fprintf (dump_file, " volatile is not const/pure\n");
819 /* Abandon all hope, ye who enter here. */
820 local->pure_const_state = IPA_NEITHER;
821 local->looping = true;
822 local->can_free = true;
824 return;
825 default:
826 break;
830 /* Check that RETVAL is used only in STMT and in comparisons against 0.
831    RETVAL is the return value of the function and STMT is the return stmt.  */
833 static bool
834 check_retval_uses (tree retval, gimple *stmt)
836 imm_use_iterator use_iter;
837 gimple *use_stmt;
839 FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
840 if (gcond *cond = dyn_cast<gcond *> (use_stmt))
842 tree op2 = gimple_cond_rhs (cond);
843 if (!integer_zerop (op2))
844 RETURN_FROM_IMM_USE_STMT (use_iter, false);
846 else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
848 enum tree_code code = gimple_assign_rhs_code (ga);
849 if (TREE_CODE_CLASS (code) != tcc_comparison)
850 RETURN_FROM_IMM_USE_STMT (use_iter, false);
851 if (!integer_zerop (gimple_assign_rhs2 (ga)))
852 RETURN_FROM_IMM_USE_STMT (use_iter, false);
854 else if (is_gimple_debug (use_stmt))
856 else if (use_stmt != stmt)
857 RETURN_FROM_IMM_USE_STMT (use_iter, false);
859 return true;
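// Editor's example (illustrative): for a return value P defined by
//
//   p = foo ();
//   if (p == 0)
//     return 0;
//   return p;
//
// the only uses of P are the gcond comparison against 0 and the return
// statement itself, so check_retval_uses returns true.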
862 /* malloc_candidate_p() checks if FUN can possibly be annotated with the malloc
863    attribute.  Currently this function does a very conservative analysis.
864    FUN is considered to be a candidate if
865    1) It returns a value of pointer type.
866    2) SSA_NAME_DEF_STMT (return_value) is either a function call or
867       a phi, and each element of the phi is either NULL or its
868       SSA_NAME_DEF_STMT (element) is a function call.
869    3) The return value has immediate uses only within comparisons (gcond or gassign)
870       and the return stmt (and likewise a phi arg has immediate uses only within a
871       comparison or the phi stmt).  */
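// Editor's example of a malloc candidate (hypothetical my_alloc, not from
// the sources):
//
//   extern void *my_alloc (unsigned) __attribute__ ((malloc));
//   void *wrap (unsigned n)
//   {
//     void *p = my_alloc (n);
//     if (p == 0)
//       __builtin_abort ();
//     return p;
//   }
//
// The return value is a pointer SSA_NAME defined by a call, and its only
// other use is the comparison against 0, so conditions 1-3 above hold.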
873 #define DUMP_AND_RETURN(reason) \
875 if (dump_file && (dump_flags & TDF_DETAILS)) \
876 fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
877 (node->name()), (reason)); \
878 return false; \
881 static bool
882 malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
883 bitmap visited)
885 cgraph_node *node = cgraph_node::get_create (fun->decl);
886 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
887 return true;
889 if (!check_retval_uses (retval, ret_stmt))
890 DUMP_AND_RETURN("Return value has uses outside return stmt"
891 " and comparisons against 0.")
893 gimple *def = SSA_NAME_DEF_STMT (retval);
895 if (gcall *call_stmt = dyn_cast<gcall *> (def))
897 tree callee_decl = gimple_call_fndecl (call_stmt);
898 if (!callee_decl)
899 return false;
901 if (!ipa && !DECL_IS_MALLOC (callee_decl))
902 DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
903 " non-ipa mode.")
905 cgraph_edge *cs = node->get_edge (call_stmt);
906 if (cs)
908 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
909 es->is_return_callee_uncaptured = true;
913 else if (gphi *phi = dyn_cast<gphi *> (def))
915 bool all_args_zero = true;
916 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
918 tree arg = gimple_phi_arg_def (phi, i);
919 if (integer_zerop (arg))
920 continue;
922 all_args_zero = false;
923 if (TREE_CODE (arg) != SSA_NAME)
924 DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
925 if (!check_retval_uses (arg, phi))
926 DUMP_AND_RETURN ("phi arg has uses outside phi"
927 " and comparisons against 0.")
929 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
930 if (is_a<gphi *> (arg_def))
932 if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
933 DUMP_AND_RETURN ("nested phi fail")
934 continue;
937 gcall *call_stmt = dyn_cast<gcall *> (arg_def);
938 if (!call_stmt)
939 DUMP_AND_RETURN ("phi arg is a not a call_stmt.")
941 tree callee_decl = gimple_call_fndecl (call_stmt);
942 if (!callee_decl)
943 return false;
944 if (!ipa && !DECL_IS_MALLOC (callee_decl))
945 DUMP_AND_RETURN("callee_decl does not have malloc attribute"
946 " for non-ipa mode.")
948 cgraph_edge *cs = node->get_edge (call_stmt);
949 if (cs)
951 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
952 es->is_return_callee_uncaptured = true;
956 if (all_args_zero)
957 DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
960 else
961 DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
963 return true;
966 static bool
967 malloc_candidate_p (function *fun, bool ipa)
969 basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
970 edge e;
971 edge_iterator ei;
972 cgraph_node *node = cgraph_node::get_create (fun->decl);
974 if (EDGE_COUNT (exit_block->preds) == 0
975 || !flag_delete_null_pointer_checks)
976 return false;
978 auto_bitmap visited;
979 FOR_EACH_EDGE (e, ei, exit_block->preds)
981 gimple_stmt_iterator gsi = gsi_last_bb (e->src);
982 greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));
984 if (!ret_stmt)
985 return false;
987 tree retval = gimple_return_retval (ret_stmt);
988 if (!retval)
989 DUMP_AND_RETURN("No return value.")
991 if (TREE_CODE (retval) != SSA_NAME
992 || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
993 DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
995 if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
996 return false;
999 if (dump_file && (dump_flags & TDF_DETAILS))
1000 fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
1001 IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
1002 return true;
1005 #undef DUMP_AND_RETURN
1007 /* This is the main routine for finding the reference patterns for
1008 global variables within a function FN. */
1010 static funct_state
1011 analyze_function (struct cgraph_node *fn, bool ipa)
1013 tree decl = fn->decl;
1014 funct_state l;
1015 basic_block this_block;
1017 l = XCNEW (struct funct_state_d);
1018 l->pure_const_state = IPA_CONST;
1019 l->state_previously_known = IPA_NEITHER;
1020 l->looping_previously_known = true;
1021 l->looping = false;
1022 l->can_throw = false;
1023 l->can_free = false;
1024 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
1025 flags_from_decl_or_type (fn->decl),
1026 fn->cannot_return_p ());
1028 if (fn->thunk.thunk_p || fn->alias)
1030      /* Thunks get propagated through, so nothing interesting happens.  */
1031 gcc_assert (ipa);
1032 if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
1033 l->pure_const_state = IPA_NEITHER;
1034 return l;
1037 if (dump_file)
1039 fprintf (dump_file, "\n\n local analysis of %s\n ",
1040 fn->name ());
1043 push_cfun (DECL_STRUCT_FUNCTION (decl));
1045 FOR_EACH_BB_FN (this_block, cfun)
1047 gimple_stmt_iterator gsi;
1048 struct walk_stmt_info wi;
1050 memset (&wi, 0, sizeof (wi));
1051 for (gsi = gsi_start_bb (this_block);
1052 !gsi_end_p (gsi);
1053 gsi_next (&gsi))
1055 check_stmt (&gsi, l, ipa);
1056 if (l->pure_const_state == IPA_NEITHER
1057 && l->looping
1058 && l->can_throw
1059 && l->can_free)
1060 goto end;
1064 end:
1065 if (l->pure_const_state != IPA_NEITHER)
1067      /* Const functions cannot have back edges (an
1068	 indication of a possible infinite loop side
1069	 effect).  */
1070 if (mark_dfs_back_edges ())
1072	  /* Preheaders are needed for SCEV to work.
1073	     Simple latches and recorded exits improve chances that a loop
1074	     will be proven finite in testcases such as loop-15.c
1075	     and loop-24.c.  */
1076 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1077 | LOOPS_HAVE_SIMPLE_LATCHES
1078 | LOOPS_HAVE_RECORDED_EXITS);
1079 if (dump_file && (dump_flags & TDF_DETAILS))
1080 flow_loops_dump (dump_file, NULL, 0);
1081 if (mark_irreducible_loops ())
1083 if (dump_file)
1084 fprintf (dump_file, " has irreducible loops\n");
1085 l->looping = true;
1087 else
1089 struct loop *loop;
1090 scev_initialize ();
1091 FOR_EACH_LOOP (loop, 0)
1092 if (!finite_loop_p (loop))
1094 if (dump_file)
1095 fprintf (dump_file, " cannot prove finiteness of "
1096 "loop %i\n", loop->num);
1097		  l->looping = true;
1098 break;
1100 scev_finalize ();
1102 loop_optimizer_finalize ();
1106 if (dump_file && (dump_flags & TDF_DETAILS))
1107 fprintf (dump_file, " checking previously known:");
1109 better_state (&l->pure_const_state, &l->looping,
1110 l->state_previously_known,
1111 l->looping_previously_known);
1112 if (TREE_NOTHROW (decl))
1113 l->can_throw = false;
1115 l->malloc_state = STATE_MALLOC_BOTTOM;
1116 if (DECL_IS_MALLOC (decl))
1117 l->malloc_state = STATE_MALLOC;
1118 else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
1119 l->malloc_state = STATE_MALLOC_TOP;
1120 else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
1121 l->malloc_state = STATE_MALLOC;
1123 pop_cfun ();
1124 if (dump_file)
1126 if (l->looping)
1127 fprintf (dump_file, "Function is locally looping.\n");
1128 if (l->can_throw)
1129 fprintf (dump_file, "Function is locally throwing.\n");
1130 if (l->pure_const_state == IPA_CONST)
1131 fprintf (dump_file, "Function is locally const.\n");
1132 if (l->pure_const_state == IPA_PURE)
1133 fprintf (dump_file, "Function is locally pure.\n");
1134 if (l->can_free)
1135 fprintf (dump_file, "Function can locally free.\n");
1136 if (l->malloc_state == STATE_MALLOC)
1137 fprintf (dump_file, "Function is locally malloc.\n");
1139 return l;
1142 void
1143 funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
1145 /* There are some shared nodes, in particular the initializers on
1146 static declarations. We do not need to scan them more than once
1147      since all we would be interested in are the address-of
1148 operations. */
1149 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1151 funct_state_d *a = analyze_function (node, true);
1152 new (state) funct_state_d (*a);
1153 free (a);
1157 /* Called when a new clone is inserted into the callgraph late.  */
1159 void
1160 funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *,
1161 funct_state_d *src_data,
1162 funct_state_d *dst_data)
1164 new (dst_data) funct_state_d (*src_data);
1168 void
1169 pass_ipa_pure_const::
1170 register_hooks (void)
1172 if (init_p)
1173 return;
1175 init_p = true;
1177 funct_state_summaries = new funct_state_summary_t (symtab);
1181 /* Analyze each function in the cgraph to see if it is locally PURE or
1182 CONST. */
1184 static void
1185 pure_const_generate_summary (void)
1187 struct cgraph_node *node;
1189 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1190 pass->register_hooks ();
1192 /* Process all of the functions.
1194 We process AVAIL_INTERPOSABLE functions. We cannot use the results
1195 by default, but the info can be used at LTO with -fwhole-program or
1196      when a function got cloned and the clone is AVAILABLE.  */
1198 FOR_EACH_DEFINED_FUNCTION (node)
1199 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1201 funct_state_d *a = analyze_function (node, true);
1202 new (funct_state_summaries->get_create (node)) funct_state_d (*a);
1203 free (a);
1208 /* Serialize the ipa info for lto. */
1210 static void
1211 pure_const_write_summary (void)
1213 struct cgraph_node *node;
1214 struct lto_simple_output_block *ob
1215 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
1216 unsigned int count = 0;
1217 lto_symtab_encoder_iterator lsei;
1218 lto_symtab_encoder_t encoder;
1220 encoder = lto_get_out_decl_state ()->symtab_node_encoder;
1222 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1223 lsei_next_function_in_partition (&lsei))
1225 node = lsei_cgraph_node (lsei);
1226 if (node->definition && funct_state_summaries->exists (node))
1227 count++;
1230 streamer_write_uhwi_stream (ob->main_stream, count);
1232 /* Process all of the functions. */
1233 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
1234 lsei_next_function_in_partition (&lsei))
1236 node = lsei_cgraph_node (lsei);
1237 funct_state_d *fs = funct_state_summaries->get (node);
1238 if (node->definition && fs != NULL)
1240 struct bitpack_d bp;
1241 int node_ref;
1242 lto_symtab_encoder_t encoder;
1244 encoder = ob->decl_state->symtab_node_encoder;
1245 node_ref = lto_symtab_encoder_encode (encoder, node);
1246 streamer_write_uhwi_stream (ob->main_stream, node_ref);
1248	  /* Note that the flags must be read back in the same
1249	     order in which they are packed into the bitpack here.  */
1250 bp = bitpack_create (ob->main_stream);
1251 bp_pack_value (&bp, fs->pure_const_state, 2);
1252 bp_pack_value (&bp, fs->state_previously_known, 2);
1253 bp_pack_value (&bp, fs->looping_previously_known, 1);
1254 bp_pack_value (&bp, fs->looping, 1);
1255 bp_pack_value (&bp, fs->can_throw, 1);
1256 bp_pack_value (&bp, fs->can_free, 1);
1257 bp_pack_value (&bp, fs->malloc_state, 2);
1258 streamer_write_bitpack (&bp);
1262 lto_destroy_simple_output_block (ob);
1266 /* Deserialize the ipa info for lto. */
1268 static void
1269 pure_const_read_summary (void)
1271 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1272 struct lto_file_decl_data *file_data;
1273 unsigned int j = 0;
1275 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1276 pass->register_hooks ();
1278 while ((file_data = file_data_vec[j++]))
1280 const char *data;
1281 size_t len;
1282 struct lto_input_block *ib
1283 = lto_create_simple_input_block (file_data,
1284 LTO_section_ipa_pure_const,
1285 &data, &len);
1286 if (ib)
1288 unsigned int i;
1289 unsigned int count = streamer_read_uhwi (ib);
1291 for (i = 0; i < count; i++)
1293 unsigned int index;
1294 struct cgraph_node *node;
1295 struct bitpack_d bp;
1296 funct_state fs;
1297 lto_symtab_encoder_t encoder;
1299 index = streamer_read_uhwi (ib);
1300 encoder = file_data->symtab_node_encoder;
1301 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
1302 index));
1304 fs = funct_state_summaries->get_create (node);
1305	      /* Note that the flags must be read in the same
1306		 order in which they were written by
1307		 pure_const_write_summary above.  */
1308 bp = streamer_read_bitpack (ib);
1309 fs->pure_const_state
1310 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1311 fs->state_previously_known
1312 = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
1313 fs->looping_previously_known = bp_unpack_value (&bp, 1);
1314 fs->looping = bp_unpack_value (&bp, 1);
1315 fs->can_throw = bp_unpack_value (&bp, 1);
1316 fs->can_free = bp_unpack_value (&bp, 1);
1317 fs->malloc_state
1318 = (enum malloc_state_e) bp_unpack_value (&bp, 2);
1320 if (dump_file)
1322 int flags = flags_from_decl_or_type (node->decl);
1323 fprintf (dump_file, "Read info for %s ", node->dump_name ());
1324 if (flags & ECF_CONST)
1325 fprintf (dump_file, " const");
1326 if (flags & ECF_PURE)
1327 fprintf (dump_file, " pure");
1328 if (flags & ECF_NOTHROW)
1329 fprintf (dump_file, " nothrow");
1330 fprintf (dump_file, "\n pure const state: %s\n",
1331 pure_const_names[fs->pure_const_state]);
1332 fprintf (dump_file, " previously known state: %s\n",
1333 pure_const_names[fs->state_previously_known]);
1334 if (fs->looping)
1335 fprintf (dump_file," function is locally looping\n");
1336 if (fs->looping_previously_known)
1337 fprintf (dump_file," function is previously known looping\n");
1338 if (fs->can_throw)
1339 fprintf (dump_file," function is locally throwing\n");
1340 if (fs->can_free)
1341 fprintf (dump_file," function can locally free\n");
1342 fprintf (dump_file, "\n malloc state: %s\n",
1343 malloc_state_names[fs->malloc_state]);
1347 lto_destroy_simple_input_block (file_data,
1348 LTO_section_ipa_pure_const,
1349 ib, data, len);
1354 /* We only propagate across edges that can throw externally and whose callee
1355    is not interposable.  */
1357 static bool
1358 ignore_edge_for_nothrow (struct cgraph_edge *e)
1360 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1361 return true;
1363 enum availability avail;
1364 cgraph_node *n = e->callee->function_or_virtual_thunk_symbol (&avail,
1365 e->caller);
1366 if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (n->decl))
1367 return true;
1368 return opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1369 && !e->callee->binds_to_current_def_p (e->caller);
1372 /* Return true if NODE is a self-recursive function.
1373    Indirectly recursive functions appear as non-trivial strongly
1374    connected components, so we need to care about self recursion
1375    only.  */
1377 static bool
1378 self_recursive_p (struct cgraph_node *node)
1380 struct cgraph_edge *e;
1381 for (e = node->callees; e; e = e->next_callee)
1382 if (e->callee->function_symbol () == node)
1383 return true;
1384 return false;
1387 /* Return true if N is a cdtor that is not const or pure.  In this case we may
1388    need to remove the unreachable function if it is marked const/pure.  */
1390 static bool
1391 cdtor_p (cgraph_node *n, void *)
1393 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1394 return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1395 || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1396 return false;
1399 /* We only propagate across edges with non-interposable callee. */
1401 static bool
1402 ignore_edge_for_pure_const (struct cgraph_edge *e)
1404 enum availability avail;
1405 e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1406 return (avail <= AVAIL_INTERPOSABLE);
1410 /* Produce transitive closure over the callgraph and compute pure/const
1411 attributes. */
1413 static bool
1414 propagate_pure_const (void)
1416 struct cgraph_node *node;
1417 struct cgraph_node *w;
1418 struct cgraph_node **order =
1419 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1420 int order_pos;
1421 int i;
1422 struct ipa_dfs_info * w_info;
1423 bool remove_p = false;
1424 bool has_cdtor;
1426 order_pos = ipa_reduced_postorder (order, true,
1427 ignore_edge_for_pure_const);
1428 if (dump_file)
1430 cgraph_node::dump_cgraph (dump_file);
1431 ipa_print_order (dump_file, "reduced", order, order_pos);
1434 /* Propagate the local information through the call graph to produce
1435 the global information. All the nodes within a cycle will have
1436 the same info so we collapse cycles first. Then we can do the
1437 propagation in one pass from the leaves to the roots. */
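// Editor's example (illustrative): if f and g call only each other, they
// form one SCC; both receive the worse of their local states, and because
// the cycle makes count > 1 below, looping is set for the whole cycle.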
1438 for (i = 0; i < order_pos; i++ )
1440 enum pure_const_state_e pure_const_state = IPA_CONST;
1441 bool looping = false;
1442 int count = 0;
1443 node = order[i];
1445 if (node->alias)
1446 continue;
1448 if (dump_file && (dump_flags & TDF_DETAILS))
1449 fprintf (dump_file, "Starting cycle\n");
1451 /* Find the worst state for any node in the cycle. */
1452 w = node;
1453 while (w && pure_const_state != IPA_NEITHER)
1455 struct cgraph_edge *e;
1456 struct cgraph_edge *ie;
1457 int i;
1458 struct ipa_ref *ref = NULL;
1460 funct_state w_l = funct_state_summaries->get_create (w);
1461 if (dump_file && (dump_flags & TDF_DETAILS))
1462 fprintf (dump_file, " Visiting %s state:%s looping %i\n",
1463 w->dump_name (),
1464 pure_const_names[w_l->pure_const_state],
1465 w_l->looping);
1467 /* First merge in function body properties.
1468 We are safe to pass NULL as FROM and TO because we will take care
1469 of possible interposition when walking callees. */
1470 worse_state (&pure_const_state, &looping,
1471 w_l->pure_const_state, w_l->looping,
1472 NULL, NULL);
1473 if (pure_const_state == IPA_NEITHER)
1474 break;
1476 count++;
1478 /* We consider recursive cycles as possibly infinite.
1479 This might be relaxed since infinite recursion leads to stack
1480 overflow. */
1481 if (count > 1)
1482 looping = true;
1484 /* Now walk the edges and merge in callee properties. */
1485 for (e = w->callees; e && pure_const_state != IPA_NEITHER;
1486 e = e->next_callee)
1488 enum availability avail;
1489 struct cgraph_node *y = e->callee->
1490 function_or_virtual_thunk_symbol (&avail,
1491 e->caller);
1492 enum pure_const_state_e edge_state = IPA_CONST;
1493 bool edge_looping = false;
1495 if (dump_file && (dump_flags & TDF_DETAILS))
1497 fprintf (dump_file, " Call to %s",
1498 e->callee->dump_name ());
1500 if (avail > AVAIL_INTERPOSABLE)
1502 funct_state y_l = funct_state_summaries->get_create (y);
1504 if (dump_file && (dump_flags & TDF_DETAILS))
1506 fprintf (dump_file,
1507 " state:%s looping:%i\n",
1508 pure_const_names[y_l->pure_const_state],
1509 y_l->looping);
1511 if (y_l->pure_const_state > IPA_PURE
1512 && e->cannot_lead_to_return_p ())
1514 if (dump_file && (dump_flags & TDF_DETAILS))
1515 fprintf (dump_file,
1516 " Ignoring side effects"
1517 " -> pure, looping\n");
1518 edge_state = IPA_PURE;
1519 edge_looping = true;
1521 else
1523 edge_state = y_l->pure_const_state;
1524 edge_looping = y_l->looping;
1527 else if (special_builtin_state (&edge_state, &edge_looping,
1528 y->decl))
1530 else
1531 state_from_flags (&edge_state, &edge_looping,
1532 flags_from_decl_or_type (y->decl),
1533 e->cannot_lead_to_return_p ());
1535 /* Merge the results with what we already know. */
1536 better_state (&edge_state, &edge_looping,
1537 w_l->state_previously_known,
1538 w_l->looping_previously_known);
1539 worse_state (&pure_const_state, &looping,
1540 edge_state, edge_looping, e->caller, e->callee);
1541 if (pure_const_state == IPA_NEITHER)
1542 break;
1545	  /* Now process the indirect calls.  */
1546 for (ie = w->indirect_calls;
1547 ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
1549 enum pure_const_state_e edge_state = IPA_CONST;
1550 bool edge_looping = false;
1552 if (dump_file && (dump_flags & TDF_DETAILS))
1553 fprintf (dump_file, " Indirect call");
1554 state_from_flags (&edge_state, &edge_looping,
1555 ie->indirect_info->ecf_flags,
1556 ie->cannot_lead_to_return_p ());
1557 /* Merge the results with what we already know. */
1558 better_state (&edge_state, &edge_looping,
1559 w_l->state_previously_known,
1560 w_l->looping_previously_known);
1561 worse_state (&pure_const_state, &looping,
1562 edge_state, edge_looping, NULL, NULL);
1563 if (pure_const_state == IPA_NEITHER)
1564 break;
1567 /* And finally all loads and stores. */
1568 for (i = 0; w->iterate_reference (i, ref)
1569 && pure_const_state != IPA_NEITHER; i++)
1571 enum pure_const_state_e ref_state = IPA_CONST;
1572 bool ref_looping = false;
1573 switch (ref->use)
1575 case IPA_REF_LOAD:
1576 /* readonly reads are safe. */
1577 if (TREE_READONLY (ref->referred->decl))
1578 break;
1579 if (dump_file && (dump_flags & TDF_DETAILS))
1580 fprintf (dump_file, " nonreadonly global var read\n");
1581 ref_state = IPA_PURE;
1582 break;
1583 case IPA_REF_STORE:
1584 if (ref->cannot_lead_to_return ())
1585 break;
1586 ref_state = IPA_NEITHER;
1587 if (dump_file && (dump_flags & TDF_DETAILS))
1588 fprintf (dump_file, " global var write\n");
1589 break;
1590 case IPA_REF_ADDR:
1591 break;
1592 default:
1593 gcc_unreachable ();
1595 better_state (&ref_state, &ref_looping,
1596 w_l->state_previously_known,
1597 w_l->looping_previously_known);
1598 worse_state (&pure_const_state, &looping,
1599 ref_state, ref_looping, NULL, NULL);
1600 if (pure_const_state == IPA_NEITHER)
1601 break;
1603 w_info = (struct ipa_dfs_info *) w->aux;
1604 w = w_info->next_cycle;
1606 if (dump_file && (dump_flags & TDF_DETAILS))
1607 fprintf (dump_file, "Result %s looping %i\n",
1608 pure_const_names [pure_const_state],
1609 looping);
1611 /* Find the worst state of can_free for any node in the cycle. */
1612 bool can_free = false;
1613 w = node;
1614 while (w && !can_free)
1616 struct cgraph_edge *e;
1617 funct_state w_l = funct_state_summaries->get (w);
1619 if (w_l->can_free
1620 || w->get_availability () == AVAIL_INTERPOSABLE
1621 || w->indirect_calls)
1622 can_free = true;
1624 for (e = w->callees; e && !can_free; e = e->next_callee)
1626 enum availability avail;
1627 struct cgraph_node *y = e->callee->
1628 function_or_virtual_thunk_symbol (&avail,
1629 e->caller);
1631 if (avail > AVAIL_INTERPOSABLE)
1632 can_free = funct_state_summaries->get (y)->can_free;
1633 else
1634 can_free = true;
1636 w_info = (struct ipa_dfs_info *) w->aux;
1637 w = w_info->next_cycle;
1640      /* Copy back the region's pure_const_state, which is shared by
1641	 all nodes in the region.  */
1642 w = node;
1643 while (w)
1645 funct_state w_l = funct_state_summaries->get (w);
1646 enum pure_const_state_e this_state = pure_const_state;
1647 bool this_looping = looping;
1649 w_l->can_free = can_free;
1650 w->nonfreeing_fn = !can_free;
1651 if (!can_free && dump_file)
1652 fprintf (dump_file, "Function found not to call free: %s\n",
1653 w->name ());
1655 if (w_l->state_previously_known != IPA_NEITHER
1656 && this_state > w_l->state_previously_known)
1658 this_state = w_l->state_previously_known;
1659 if (this_state == IPA_NEITHER)
1660 this_looping = w_l->looping_previously_known;
1662 if (!this_looping && self_recursive_p (w))
1663 this_looping = true;
1664 if (!w_l->looping_previously_known)
1665 this_looping = false;
1667 /* All nodes within a cycle share the same info. */
1668 w_l->pure_const_state = this_state;
1669 w_l->looping = this_looping;
1671 /* Inline clones share declaration with their offline copies;
1672 do not modify their declarations since the offline copy may
1673 be different. */
1674 if (!w->global.inlined_to)
1675 switch (this_state)
1677 case IPA_CONST:
1678 if (!TREE_READONLY (w->decl))
1680 warn_function_const (w->decl, !this_looping);
1681 if (dump_file)
1682 fprintf (dump_file, "Function found to be %sconst: %s\n",
1683 this_looping ? "looping " : "",
1684 w->name ());
1686 /* Turning constructor or destructor to non-looping const/pure
1687 enables us to possibly remove the function completely. */
1688 if (this_looping)
1689 has_cdtor = false;
1690 else
1691 has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
1692 NULL, true);
1693 if (w->set_const_flag (true, this_looping))
1695 if (dump_file)
1696 fprintf (dump_file,
1697 "Declaration updated to be %sconst: %s\n",
1698 this_looping ? "looping " : "",
1699 w->name ());
1700 remove_p |= has_cdtor;
1702 break;
1704 case IPA_PURE:
1705 if (!DECL_PURE_P (w->decl))
1707 warn_function_pure (w->decl, !this_looping);
1708 if (dump_file)
1709 fprintf (dump_file, "Function found to be %spure: %s\n",
1710 this_looping ? "looping " : "",
1711 w->name ());
1713 if (this_looping)
1714 has_cdtor = false;
1715 else
1716 has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
1717 NULL, true);
1718 if (w->set_pure_flag (true, this_looping))
1720 if (dump_file)
1721 fprintf (dump_file,
1722 "Declaration updated to be %spure: %s\n",
1723 this_looping ? "looping " : "",
1724 w->name ());
1725 remove_p |= has_cdtor;
1727 break;
1729 default:
1730 break;
1732 w_info = (struct ipa_dfs_info *) w->aux;
1733 w = w_info->next_cycle;
1737 ipa_free_postorder_info ();
1738 free (order);
1739 return remove_p;
1742 /* Produce transitive closure over the callgraph and compute nothrow
1743 attributes. */
1745 static void
1746 propagate_nothrow (void)
1748 struct cgraph_node *node;
1749 struct cgraph_node *w;
1750 struct cgraph_node **order =
1751 XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1752 int order_pos;
1753 int i;
1754 struct ipa_dfs_info * w_info;
1756 order_pos = ipa_reduced_postorder (order, true,
1757 ignore_edge_for_nothrow);
1758 if (dump_file)
1760 cgraph_node::dump_cgraph (dump_file);
1761 ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
1764 /* Propagate the local information through the call graph to produce
1765 the global information. All the nodes within a cycle will have
1766 the same info so we collapse cycles first. Then we can do the
1767 propagation in one pass from the leaves to the roots. */
1768 for (i = 0; i < order_pos; i++ )
1770 bool can_throw = false;
1771 node = order[i];
1773 if (node->alias)
1774 continue;
1776 /* Find the worst state for any node in the cycle. */
1777 w = node;
1778 while (w && !can_throw)
1780 struct cgraph_edge *e, *ie;
1782 if (!TREE_NOTHROW (w->decl))
1784 funct_state w_l = funct_state_summaries->get_create (w);
1786 if (w_l->can_throw
1787 || w->get_availability () == AVAIL_INTERPOSABLE)
1788 can_throw = true;
1790 for (e = w->callees; e && !can_throw; e = e->next_callee)
1792 enum availability avail;
1794 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1795 continue;
1797 struct cgraph_node *y = e->callee->
1798 function_or_virtual_thunk_symbol (&avail,
1799 e->caller);
1801 /* We can use info about the callee only if we know it
1802 cannot be interposed.
1803 When callee is compiled with non-call exceptions we also
1804 must check that the declaration is bound to current
1805 body as other semantically equivalent body may still
1806 throw. */
1807 if (avail <= AVAIL_INTERPOSABLE
1808 || (!TREE_NOTHROW (y->decl)
1809 && (funct_state_summaries->get_create (y)->can_throw
1810 || (opt_for_fn (y->decl, flag_non_call_exceptions)
1811 && !e->callee->binds_to_current_def_p (w)))))
1812 can_throw = true;
1814 for (ie = w->indirect_calls; ie && !can_throw;
1815 ie = ie->next_callee)
1816 if (ie->can_throw_external
1817 && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
1818 can_throw = true;
1820 w_info = (struct ipa_dfs_info *) w->aux;
1821 w = w_info->next_cycle;
1824      /* Copy back the region's nothrow state, which is shared by
1825	 all nodes in the region.  */
1826 w = node;
1827 while (w)
1829 funct_state w_l = funct_state_summaries->get_create (w);
1830 if (!can_throw && !TREE_NOTHROW (w->decl))
1832 /* Inline clones share declaration with their offline copies;
1833 do not modify their declarations since the offline copy may
1834 be different. */
1835 if (!w->global.inlined_to)
1837 w->set_nothrow_flag (true);
1838 if (dump_file)
1839 fprintf (dump_file, "Function found to be nothrow: %s\n",
1840 w->name ());
1843 else if (can_throw && !TREE_NOTHROW (w->decl))
1844 w_l->can_throw = true;
1845 w_info = (struct ipa_dfs_info *) w->aux;
1846 w = w_info->next_cycle;
1850 ipa_free_postorder_info ();
1851 free (order);
1854 /* Debugging function to dump state of malloc lattice. */
1856 DEBUG_FUNCTION
1857 static void
1858 dump_malloc_lattice (FILE *dump_file, const char *s)
1860 if (!dump_file)
1861 return;
1863 fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1864 cgraph_node *node;
1865 FOR_EACH_FUNCTION (node)
1867 funct_state fs = funct_state_summaries->get (node);
1868 if (fs)
1869 fprintf (dump_file, "%s: %s\n", node->name (),
1870 malloc_state_names[fs->malloc_state]);
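// Editor's example of the propagation below (illustrative):
//
//   void *leaf (unsigned n) { return malloc (n); }   // STATE_MALLOC
//   void *mid (unsigned n)  { return leaf (n); }     // starts as STATE_MALLOC_TOP
//
// The reverse-postorder walk joins MID's state with its callee's, moving
// MID from STATE_MALLOC_TOP to STATE_MALLOC, so both end up flagged malloc.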
1874 /* Propagate malloc attribute across the callgraph. */
1876 static void
1877 propagate_malloc (void)
1879 cgraph_node *node;
1880 FOR_EACH_FUNCTION (node)
1882 if (DECL_IS_MALLOC (node->decl))
1883 if (!funct_state_summaries->exists (node))
1885 funct_state fs = funct_state_summaries->get_create (node);
1886 fs->malloc_state = STATE_MALLOC;
1890 dump_malloc_lattice (dump_file, "Initial");
1891 struct cgraph_node **order
1892 = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1893 int order_pos = ipa_reverse_postorder (order);
1894 bool changed = true;
1896 while (changed)
1898 changed = false;
1899 /* Walk in postorder. */
1900 for (int i = order_pos - 1; i >= 0; --i)
1902 cgraph_node *node = order[i];
1903 if (node->alias
1904 || !node->definition
1905 || !funct_state_summaries->exists (node))
1906 continue;
1908 funct_state l = funct_state_summaries->get (node);
1910 /* FIXME: add support for indirect-calls. */
1911 if (node->indirect_calls)
1913 l->malloc_state = STATE_MALLOC_BOTTOM;
1914 continue;
1917 if (node->get_availability () <= AVAIL_INTERPOSABLE)
1919 l->malloc_state = STATE_MALLOC_BOTTOM;
1920 continue;
1923 if (l->malloc_state == STATE_MALLOC_BOTTOM)
1924 continue;
1926 vec<cgraph_node *> callees = vNULL;
1927 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
1929 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
1930 if (es && es->is_return_callee_uncaptured)
1931 callees.safe_push (cs->callee);
1934 malloc_state_e new_state = l->malloc_state;
1935 for (unsigned j = 0; j < callees.length (); j++)
1937 cgraph_node *callee = callees[j];
1938	      if (!funct_state_summaries->exists (callee))
1940 new_state = STATE_MALLOC_BOTTOM;
1941 break;
1943 malloc_state_e callee_state
1944 = funct_state_summaries->get_create (callee)->malloc_state;
1945 if (new_state < callee_state)
1946 new_state = callee_state;
1948 if (new_state != l->malloc_state)
1950 changed = true;
1951 l->malloc_state = new_state;
1956 FOR_EACH_DEFINED_FUNCTION (node)
1957 if (funct_state_summaries->exists (node))
1959 funct_state l = funct_state_summaries->get (node);
1960 if (!node->alias
1961 && l->malloc_state == STATE_MALLOC
1962 && !node->global.inlined_to)
1964 if (dump_file && (dump_flags & TDF_DETAILS))
1965 fprintf (dump_file, "Function %s found to be malloc\n",
1966 node->name ());
1968 bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
1969 node->set_malloc_flag (true);
1970 if (!malloc_decl_p && warn_suggest_attribute_malloc)
1971 warn_function_malloc (node->decl);
1975 dump_malloc_lattice (dump_file, "after propagation");
1976 ipa_free_postorder_info ();
1977 free (order);

/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */
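/* For instance (hypothetical user code, not part of GCC): with

     static int g (int x) { return x + 1; }
     int f (int x) { return g (x) * 2; }

   the local summaries leave f's state limited by its call to g; the
   transitive closure combines the two summaries and can mark both
   functions const.  */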

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  bool remove_p;

  /* Knowing which functions cannot throw lets more statements be seen
     as not leading to a return, which improves the later analyses.  */
  propagate_nothrow ();
  propagate_malloc ();
  remove_p = propagate_pure_const ();

  delete funct_state_summaries;
  return remove_p ? TODO_remove_functions : 0;
}

static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}

pass_ipa_pure_const::pass_ipa_pure_const (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_pure_const, ctxt,
		      pure_const_generate_summary, /* generate_summary */
		      pure_const_write_summary, /* write_summary */
		      pure_const_read_summary, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL), /* variable_transform */
  init_p (false) {}

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}

/* Return true if function should be skipped for local pure const analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over whole program after early
     optimizations we must not promote functions that are called by already
     processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable and
     do not have any non-interposable aliases.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !node->has_aliases_p ())
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}

/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */
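/* As an illustration (hypothetical user code, not part of GCC): the local
   pass can discover

     int sq (int x) { return x * x; }

   to be const, since it reads no memory and has no side effects, while

     int peek (const int *p) { return *p; }

   only qualifies as pure, because it reads memory through its argument.  */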

namespace {

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);
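
  /* Illustration (hypothetical user code): in

       void fail (const char *msg)
       {
	 fprintf (stderr, "%s\n", msg);
	 exit (1);
       }

     the exit block has no incoming edges, so the discovery below can
     mark fail as noreturn.  */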
  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}

/* Emit noreturn warnings.  */
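/* Illustration (hypothetical user code): for

     void spin (void) { for (;;); }

   compiled with -Wsuggest-attribute=noreturn, the pass below suggests
   adding the noreturn attribute, since no edge reaches the exit block
   and the declaration is not already marked noreturn.  */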

namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}

/* Simple local pass for nothrow discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */
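/* Illustration (hypothetical user code): compiling

     int add (int a, int b) { return a + b; }

   as C++, no statement in add can throw to an external handler, so the
   pass below can set TREE_NOTHROW on it, matching what an explicit
   noexcept specification would provide.  */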

namespace {

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_nothrow (m_ctxt); }
  virtual bool gate (function *) { return optimize; }
  virtual unsigned int execute (function *);

}; // class pass_nothrow

unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering; we cannot really rely on availability yet,
     so be conservative about anything interposable.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
	fprintf (dump_file, "Function is interposable;"
		 " not analyzing.\n");
      return 0;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
	  {
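	    /* A self-recursive call that can throw does not by itself
	       make this function throwing: if the only way to reach a
	       throw is through the recursion, nothing can ever escape.
	       Tolerate such calls here; their EH edges become dead and
	       are purged once the nothrow flag is set below.  */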
	    if (is_gimple_call (gsi_stmt (gsi)))
	      {
		tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
		if (callee_t && recursive_call_p (current_function_decl,
						  callee_t))
		  continue;
	      }

	    if (dump_file)
	      {
		fprintf (dump_file, "Statement can throw: ");
		print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
	      }
	    return 0;
	  }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
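  /* The throwing statements tolerated above were exactly the
     self-recursive calls; now that the function is known to be nothrow,
     their EH edges are dead and can be removed.  */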
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gimple *g = last_stmt (this_block))
	if (is_gimple_call (g))
	  {
	    tree callee_t = gimple_call_fndecl (g);
	    if (callee_t
		&& recursive_call_p (current_function_decl, callee_t)
		&& maybe_clean_eh_stmt (g)
		&& gimple_purge_dead_eh_edges (this_block))
	      cfg_changed = true;
	  }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
	     current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}