/* Source provenance (from the repository web view this file was extracted from):
   commit subject: "aix: Fix _STDC_FORMAT_MACROS in inttypes.h [PR97044]"
   path: gcc/ipa-pure-const.c
   blob: bdbccd010dcc48f0d97faa64a8bf3aa3c6f7d4d4  */
/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2020 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with post inlined state.  The global sets are not prone
   to this problem since they are by definition transitive.  */

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
#include "ssa.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Printable names for the lattice values above, indexed by
   pure_const_state_e.  */
static const char *pure_const_names[3] = {"const", "pure", "neither"};
/* Lattice for the malloc attribute analysis: TOP (may become malloc),
   MALLOC (proved malloc-like), BOTTOM (definitely not malloc).  */
enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

/* Printable names for the malloc lattice, indexed by malloc_state_e.  */
static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
86 /* Holder for the const_state. There is one of these per function
87 decl. */
88 class funct_state_d
90 public:
91 funct_state_d (): pure_const_state (IPA_NEITHER),
92 state_previously_known (IPA_NEITHER), looping_previously_known (true),
93 looping (true), can_throw (true), can_free (true),
94 malloc_state (STATE_MALLOC_BOTTOM) {}
96 funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
97 state_previously_known (s.state_previously_known),
98 looping_previously_known (s.looping_previously_known),
99 looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
100 malloc_state (s.malloc_state) {}
102 /* See above. */
103 enum pure_const_state_e pure_const_state;
104 /* What user set here; we can be always sure about this. */
105 enum pure_const_state_e state_previously_known;
106 bool looping_previously_known;
108 /* True if the function could possibly infinite loop. There are a
109 lot of ways that this could be determined. We are pretty
110 conservative here. While it is possible to cse pure and const
111 calls, it is not legal to have dce get rid of the call if there
112 is a possibility that the call could infinite loop since this is
113 a behavioral change. */
114 bool looping;
116 bool can_throw;
118 /* If function can call free, munmap or otherwise make previously
119 non-trapping memory accesses trapping. */
120 bool can_free;
122 enum malloc_state_e malloc_state;
125 typedef class funct_state_d * funct_state;
127 /* The storage of the funct_state is abstracted because there is the
128 possibility that it may be desirable to move this to the cgraph
129 local info. */
131 class funct_state_summary_t:
132 public fast_function_summary <funct_state_d *, va_heap>
134 public:
135 funct_state_summary_t (symbol_table *symtab):
136 fast_function_summary <funct_state_d *, va_heap> (symtab) {}
138 virtual void insert (cgraph_node *, funct_state_d *state);
139 virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
140 funct_state_d *src_data,
141 funct_state_d *dst_data);
144 static funct_state_summary_t *funct_state_summaries = NULL;
146 static bool gate_pure_const (void);
148 namespace {
150 const pass_data pass_data_ipa_pure_const =
152 IPA_PASS, /* type */
153 "pure-const", /* name */
154 OPTGROUP_NONE, /* optinfo_flags */
155 TV_IPA_PURE_CONST, /* tv_id */
156 0, /* properties_required */
157 0, /* properties_provided */
158 0, /* properties_destroyed */
159 0, /* todo_flags_start */
160 0, /* todo_flags_finish */
163 class pass_ipa_pure_const : public ipa_opt_pass_d
165 public:
166 pass_ipa_pure_const(gcc::context *ctxt);
168 /* opt_pass methods: */
169 bool gate (function *) { return gate_pure_const (); }
170 unsigned int execute (function *fun);
172 void register_hooks (void);
174 private:
175 bool init_p;
176 }; // class pass_ipa_pure_const
178 } // anon namespace
180 /* Try to guess if function body will always be visible to compiler
181 when compiling the call and whether compiler will be able
182 to propagate the information by itself. */
184 static bool
185 function_always_visible_to_compiler_p (tree decl)
187 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
188 || DECL_COMDAT (decl));
191 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
192 is true if the function is known to be finite. The diagnostic is
193 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
194 OPTION, this function may initialize it and it is always returned
195 by the function. */
197 static hash_set<tree> *
198 suggest_attribute (int option, tree decl, bool known_finite,
199 hash_set<tree> *warned_about,
200 const char * attrib_name)
202 if (!option_enabled (option, lang_hooks.option_lang_mask (), &global_options))
203 return warned_about;
204 if (TREE_THIS_VOLATILE (decl)
205 || (known_finite && function_always_visible_to_compiler_p (decl)))
206 return warned_about;
208 if (!warned_about)
209 warned_about = new hash_set<tree>;
210 if (warned_about->contains (decl))
211 return warned_about;
212 warned_about->add (decl);
213 warning_at (DECL_SOURCE_LOCATION (decl),
214 option,
215 known_finite
216 ? G_("function might be candidate for attribute %qs")
217 : G_("function might be candidate for attribute %qs"
218 " if it is known to return normally"), attrib_name);
219 return warned_about;
222 /* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
223 is true if the function is known to be finite. */
225 static void
226 warn_function_pure (tree decl, bool known_finite)
228 /* Declaring a void function pure makes no sense and is diagnosed
229 by -Wattributes because calling it would have no effect. */
230 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
231 return;
233 static hash_set<tree> *warned_about;
234 warned_about
235 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
236 known_finite, warned_about, "pure");
239 /* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
240 is true if the function is known to be finite. */
242 static void
243 warn_function_const (tree decl, bool known_finite)
245 /* Declaring a void function const makes no sense is diagnosed
246 by -Wattributes because calling it would have no effect. */
247 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
248 return;
250 static hash_set<tree> *warned_about;
251 warned_about
252 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
253 known_finite, warned_about, "const");
256 /* Emit suggestion about __attribute__((malloc)) for DECL. */
258 static void
259 warn_function_malloc (tree decl)
261 static hash_set<tree> *warned_about;
262 warned_about
263 = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
264 true, warned_about, "malloc");
267 /* Emit suggestion about __attribute__((noreturn)) for DECL. */
269 static void
270 warn_function_noreturn (tree decl)
272 tree original_decl = decl;
274 static hash_set<tree> *warned_about;
275 if (!lang_hooks.missing_noreturn_ok_p (decl)
276 && targetm.warn_func_return (decl))
277 warned_about
278 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
279 true, warned_about, "noreturn");
282 void
283 warn_function_cold (tree decl)
285 tree original_decl = decl;
287 static hash_set<tree> *warned_about;
288 warned_about
289 = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
290 true, warned_about, "cold");
293 /* Check to see if the use (or definition when CHECKING_WRITE is true)
294 variable T is legal in a function that is either pure or const. */
296 static inline void
297 check_decl (funct_state local,
298 tree t, bool checking_write, bool ipa)
300 /* Do not want to do anything with volatile except mark any
301 function that uses one to be not const or pure. */
302 if (TREE_THIS_VOLATILE (t))
304 local->pure_const_state = IPA_NEITHER;
305 if (dump_file)
306 fprintf (dump_file, " Volatile operand is not const/pure\n");
307 return;
310 /* Do not care about a local automatic that is not static. */
311 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
312 return;
314 /* If the variable has the "used" attribute, treat it as if it had a
315 been touched by the devil. */
316 if (DECL_PRESERVE_P (t))
318 local->pure_const_state = IPA_NEITHER;
319 if (dump_file)
320 fprintf (dump_file, " Used static/global variable is not const/pure\n");
321 return;
324 /* In IPA mode we are not interested in checking actual loads and stores;
325 they will be processed at propagation time using ipa_ref. */
326 if (ipa)
327 return;
329 /* Since we have dealt with the locals and params cases above, if we
330 are CHECKING_WRITE, this cannot be a pure or constant
331 function. */
332 if (checking_write)
334 local->pure_const_state = IPA_NEITHER;
335 if (dump_file)
336 fprintf (dump_file, " static/global memory write is not const/pure\n");
337 return;
340 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
342 /* Readonly reads are safe. */
343 if (TREE_READONLY (t))
344 return; /* Read of a constant, do not change the function state. */
345 else
347 if (dump_file)
348 fprintf (dump_file, " global memory read is not const\n");
349 /* Just a regular read. */
350 if (local->pure_const_state == IPA_CONST)
351 local->pure_const_state = IPA_PURE;
354 else
356 /* Compilation level statics can be read if they are readonly
357 variables. */
358 if (TREE_READONLY (t))
359 return;
361 if (dump_file)
362 fprintf (dump_file, " static memory read is not const\n");
363 /* Just a regular read. */
364 if (local->pure_const_state == IPA_CONST)
365 local->pure_const_state = IPA_PURE;
370 /* Check to see if the use (or definition when CHECKING_WRITE is true)
371 variable T is legal in a function that is either pure or const. */
373 static inline void
374 check_op (funct_state local, tree t, bool checking_write)
376 t = get_base_address (t);
377 if (t && TREE_THIS_VOLATILE (t))
379 local->pure_const_state = IPA_NEITHER;
380 if (dump_file)
381 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
382 return;
384 else if (refs_local_or_readonly_memory_p (t))
386 if (dump_file)
387 fprintf (dump_file, " Indirect ref to local or readonly "
388 "memory is OK\n");
389 return;
391 else if (checking_write)
393 local->pure_const_state = IPA_NEITHER;
394 if (dump_file)
395 fprintf (dump_file, " Indirect ref write is not const/pure\n");
396 return;
398 else
400 if (dump_file)
401 fprintf (dump_file, " Indirect ref read is not const\n");
402 if (local->pure_const_state == IPA_CONST)
403 local->pure_const_state = IPA_PURE;
407 /* compute state based on ECF FLAGS and store to STATE and LOOPING. */
409 static void
410 state_from_flags (enum pure_const_state_e *state, bool *looping,
411 int flags, bool cannot_lead_to_return)
413 *looping = false;
414 if (flags & ECF_LOOPING_CONST_OR_PURE)
416 *looping = true;
417 if (dump_file && (dump_flags & TDF_DETAILS))
418 fprintf (dump_file, " looping\n");
420 if (flags & ECF_CONST)
422 *state = IPA_CONST;
423 if (dump_file && (dump_flags & TDF_DETAILS))
424 fprintf (dump_file, " const\n");
426 else if (flags & ECF_PURE)
428 *state = IPA_PURE;
429 if (dump_file && (dump_flags & TDF_DETAILS))
430 fprintf (dump_file, " pure\n");
432 else if (cannot_lead_to_return)
434 *state = IPA_PURE;
435 *looping = true;
436 if (dump_file && (dump_flags & TDF_DETAILS))
437 fprintf (dump_file, " ignoring side effects->pure looping\n");
439 else
441 if (dump_file && (dump_flags & TDF_DETAILS))
442 fprintf (dump_file, " neither\n");
443 *state = IPA_NEITHER;
444 *looping = true;
448 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
449 into STATE and LOOPING better of the two variants.
450 Be sure to merge looping correctly. IPA_NEITHER functions
451 have looping 0 even if they don't have to return. */
453 static inline void
454 better_state (enum pure_const_state_e *state, bool *looping,
455 enum pure_const_state_e state2, bool looping2)
457 if (state2 < *state)
459 if (*state == IPA_NEITHER)
460 *looping = looping2;
461 else
462 *looping = MIN (*looping, looping2);
463 *state = state2;
465 else if (state2 != IPA_NEITHER)
466 *looping = MIN (*looping, looping2);
469 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
470 into STATE and LOOPING worse of the two variants.
471 N is the actual node called. */
473 static inline void
474 worse_state (enum pure_const_state_e *state, bool *looping,
475 enum pure_const_state_e state2, bool looping2,
476 struct symtab_node *from,
477 struct symtab_node *to)
479 /* Consider function:
481 bool a(int *p)
483 return *p==*p;
486 During early optimization we will turn this into:
488 bool a(int *p)
490 return true;
493 Now if this function will be detected as CONST however when interposed it
494 may end up being just pure. We always must assume the worst scenario here.
496 if (*state == IPA_CONST && state2 == IPA_CONST
497 && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
499 if (dump_file && (dump_flags & TDF_DETAILS))
500 fprintf (dump_file, "Dropping state to PURE because call to %s may not "
501 "bind to current def.\n", to->dump_name ());
502 state2 = IPA_PURE;
504 *state = MAX (*state, state2);
505 *looping = MAX (*looping, looping2);
508 /* Recognize special cases of builtins that are by themselves not pure or const
509 but function using them is. */
510 static bool
511 special_builtin_state (enum pure_const_state_e *state, bool *looping,
512 tree callee)
514 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
515 switch (DECL_FUNCTION_CODE (callee))
517 case BUILT_IN_RETURN:
518 case BUILT_IN_UNREACHABLE:
519 CASE_BUILT_IN_ALLOCA:
520 case BUILT_IN_STACK_SAVE:
521 case BUILT_IN_STACK_RESTORE:
522 case BUILT_IN_EH_POINTER:
523 case BUILT_IN_EH_FILTER:
524 case BUILT_IN_UNWIND_RESUME:
525 case BUILT_IN_CXA_END_CLEANUP:
526 case BUILT_IN_EH_COPY_VALUES:
527 case BUILT_IN_FRAME_ADDRESS:
528 case BUILT_IN_APPLY_ARGS:
529 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
530 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
531 *looping = false;
532 *state = IPA_CONST;
533 return true;
534 case BUILT_IN_PREFETCH:
535 *looping = true;
536 *state = IPA_CONST;
537 return true;
538 default:
539 break;
541 return false;
544 /* Check the parameters of a function call to CALL_EXPR to see if
545 there are any references in the parameters that are not allowed for
546 pure or const functions. Also check to see if this is either an
547 indirect call, a call outside the compilation unit, or has special
548 attributes that may also effect the purity. The CALL_EXPR node for
549 the entire call expression. */
551 static void
552 check_call (funct_state local, gcall *call, bool ipa)
554 int flags = gimple_call_flags (call);
555 tree callee_t = gimple_call_fndecl (call);
556 bool possibly_throws = stmt_could_throw_p (cfun, call);
557 bool possibly_throws_externally = (possibly_throws
558 && stmt_can_throw_external (cfun, call));
560 if (possibly_throws)
562 unsigned int i;
563 for (i = 0; i < gimple_num_ops (call); i++)
564 if (gimple_op (call, i)
565 && tree_could_throw_p (gimple_op (call, i)))
567 if (possibly_throws && cfun->can_throw_non_call_exceptions)
569 if (dump_file)
570 fprintf (dump_file, " operand can throw; looping\n");
571 local->looping = true;
573 if (possibly_throws_externally)
575 if (dump_file)
576 fprintf (dump_file, " operand can throw externally\n");
577 local->can_throw = true;
582 /* The const and pure flags are set by a variety of places in the
583 compiler (including here). If someone has already set the flags
584 for the callee, (such as for some of the builtins) we will use
585 them, otherwise we will compute our own information.
587 Const and pure functions have less clobber effects than other
588 functions so we process these first. Otherwise if it is a call
589 outside the compilation unit or an indirect call we punt. This
590 leaves local calls which will be processed by following the call
591 graph. */
592 if (callee_t)
594 enum pure_const_state_e call_state;
595 bool call_looping;
597 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
598 && !nonfreeing_call_p (call))
599 local->can_free = true;
601 if (special_builtin_state (&call_state, &call_looping, callee_t))
603 worse_state (&local->pure_const_state, &local->looping,
604 call_state, call_looping,
605 NULL, NULL);
606 return;
608 /* When bad things happen to bad functions, they cannot be const
609 or pure. */
610 if (setjmp_call_p (callee_t))
612 if (dump_file)
613 fprintf (dump_file, " setjmp is not const/pure\n");
614 local->looping = true;
615 local->pure_const_state = IPA_NEITHER;
618 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
619 switch (DECL_FUNCTION_CODE (callee_t))
621 case BUILT_IN_LONGJMP:
622 case BUILT_IN_NONLOCAL_GOTO:
623 if (dump_file)
624 fprintf (dump_file,
625 " longjmp and nonlocal goto is not const/pure\n");
626 local->pure_const_state = IPA_NEITHER;
627 local->looping = true;
628 break;
629 default:
630 break;
633 else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
634 local->can_free = true;
636 /* When not in IPA mode, we can still handle self recursion. */
637 if (!ipa && callee_t
638 && recursive_call_p (current_function_decl, callee_t))
640 if (dump_file)
641 fprintf (dump_file, " Recursive call can loop.\n");
642 local->looping = true;
644 /* Either callee is unknown or we are doing local analysis.
645 Look to see if there are any bits available for the callee (such as by
646 declaration or because it is builtin) and process solely on the basis of
647 those bits. Handle internal calls always, those calls don't have
648 corresponding cgraph edges and thus aren't processed during
649 the propagation. */
650 else if (!ipa || gimple_call_internal_p (call))
652 enum pure_const_state_e call_state;
653 bool call_looping;
654 if (possibly_throws && cfun->can_throw_non_call_exceptions)
656 if (dump_file)
657 fprintf (dump_file, " can throw; looping\n");
658 local->looping = true;
660 if (possibly_throws_externally)
662 if (dump_file)
664 fprintf (dump_file, " can throw externally to lp %i\n",
665 lookup_stmt_eh_lp (call));
666 if (callee_t)
667 fprintf (dump_file, " callee:%s\n",
668 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
670 local->can_throw = true;
672 if (dump_file && (dump_flags & TDF_DETAILS))
673 fprintf (dump_file, " checking flags for call:");
674 state_from_flags (&call_state, &call_looping, flags,
675 ((flags & (ECF_NORETURN | ECF_NOTHROW))
676 == (ECF_NORETURN | ECF_NOTHROW))
677 || (!flag_exceptions && (flags & ECF_NORETURN)));
678 worse_state (&local->pure_const_state, &local->looping,
679 call_state, call_looping, NULL, NULL);
681 /* Direct functions calls are handled by IPA propagation. */
684 /* Wrapper around check_decl for loads in local more. */
686 static bool
687 check_load (gimple *, tree op, tree, void *data)
689 if (DECL_P (op))
690 check_decl ((funct_state)data, op, false, false);
691 else
692 check_op ((funct_state)data, op, false);
693 return false;
696 /* Wrapper around check_decl for stores in local more. */
698 static bool
699 check_store (gimple *, tree op, tree, void *data)
701 if (DECL_P (op))
702 check_decl ((funct_state)data, op, true, false);
703 else
704 check_op ((funct_state)data, op, true);
705 return false;
708 /* Wrapper around check_decl for loads in ipa mode. */
710 static bool
711 check_ipa_load (gimple *, tree op, tree, void *data)
713 if (DECL_P (op))
714 check_decl ((funct_state)data, op, false, true);
715 else
716 check_op ((funct_state)data, op, false);
717 return false;
720 /* Wrapper around check_decl for stores in ipa mode. */
722 static bool
723 check_ipa_store (gimple *, tree op, tree, void *data)
725 if (DECL_P (op))
726 check_decl ((funct_state)data, op, true, true);
727 else
728 check_op ((funct_state)data, op, true);
729 return false;
732 /* Look into pointer pointed to by GSIP and figure out what interesting side
733 effects it has. */
734 static void
735 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
737 gimple *stmt = gsi_stmt (*gsip);
739 if (is_gimple_debug (stmt))
740 return;
742 /* Do consider clobber as side effects before IPA, so we rather inline
743 C++ destructors and keep clobber semantics than eliminate them.
745 TODO: We may get smarter during early optimizations on these and let
746 functions containing only clobbers to be optimized more. This is a common
747 case of C++ destructors. */
749 if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
750 return;
752 if (dump_file)
754 fprintf (dump_file, " scanning: ");
755 print_gimple_stmt (dump_file, stmt, 0);
758 if (gimple_has_volatile_ops (stmt)
759 && !gimple_clobber_p (stmt))
761 local->pure_const_state = IPA_NEITHER;
762 if (dump_file)
763 fprintf (dump_file, " Volatile stmt is not const/pure\n");
766 /* Look for loads and stores. */
767 walk_stmt_load_store_ops (stmt, local,
768 ipa ? check_ipa_load : check_load,
769 ipa ? check_ipa_store : check_store);
771 if (gimple_code (stmt) != GIMPLE_CALL
772 && stmt_could_throw_p (cfun, stmt))
774 if (cfun->can_throw_non_call_exceptions)
776 if (dump_file)
777 fprintf (dump_file, " can throw; looping\n");
778 local->looping = true;
780 if (stmt_can_throw_external (cfun, stmt))
782 if (dump_file)
783 fprintf (dump_file, " can throw externally\n");
784 local->can_throw = true;
786 else
787 if (dump_file)
788 fprintf (dump_file, " can throw\n");
790 switch (gimple_code (stmt))
792 case GIMPLE_CALL:
793 check_call (local, as_a <gcall *> (stmt), ipa);
794 break;
795 case GIMPLE_LABEL:
796 if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
797 /* Target of long jump. */
799 if (dump_file)
800 fprintf (dump_file, " nonlocal label is not const/pure\n");
801 local->pure_const_state = IPA_NEITHER;
803 break;
804 case GIMPLE_ASM:
805 if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
807 if (dump_file)
808 fprintf (dump_file, " memory asm clobber is not const/pure\n");
809 /* Abandon all hope, ye who enter here. */
810 local->pure_const_state = IPA_NEITHER;
811 local->can_free = true;
813 if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
815 if (dump_file)
816 fprintf (dump_file, " volatile is not const/pure\n");
817 /* Abandon all hope, ye who enter here. */
818 local->pure_const_state = IPA_NEITHER;
819 local->looping = true;
820 local->can_free = true;
822 return;
823 default:
824 break;
828 /* Check that RETVAL is used only in STMT and in comparisons against 0.
829 RETVAL is return value of the function and STMT is return stmt. */
831 static bool
832 check_retval_uses (tree retval, gimple *stmt)
834 imm_use_iterator use_iter;
835 gimple *use_stmt;
837 FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
838 if (gcond *cond = dyn_cast<gcond *> (use_stmt))
840 tree op2 = gimple_cond_rhs (cond);
841 if (!integer_zerop (op2))
842 RETURN_FROM_IMM_USE_STMT (use_iter, false);
844 else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
846 enum tree_code code = gimple_assign_rhs_code (ga);
847 if (TREE_CODE_CLASS (code) != tcc_comparison)
848 RETURN_FROM_IMM_USE_STMT (use_iter, false);
849 if (!integer_zerop (gimple_assign_rhs2 (ga)))
850 RETURN_FROM_IMM_USE_STMT (use_iter, false);
852 else if (is_gimple_debug (use_stmt))
854 else if (use_stmt != stmt)
855 RETURN_FROM_IMM_USE_STMT (use_iter, false);
857 return true;
860 /* malloc_candidate_p() checks if FUN can possibly be annotated with malloc
861 attribute. Currently this function does a very conservative analysis.
862 FUN is considered to be a candidate if
863 1) It returns a value of pointer type.
864 2) SSA_NAME_DEF_STMT (return_value) is either a function call or
865 a phi, and element of phi is either NULL or
866 SSA_NAME_DEF_STMT(element) is function call.
867 3) The return-value has immediate uses only within comparisons (gcond or gassign)
868 and return_stmt (and likewise a phi arg has immediate use only within comparison
869 or the phi stmt). */
871 #define DUMP_AND_RETURN(reason) \
873 if (dump_file && (dump_flags & TDF_DETAILS)) \
874 fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
875 (node->dump_name ()), (reason)); \
876 return false; \
879 static bool
880 malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
881 bitmap visited)
883 cgraph_node *node = cgraph_node::get_create (fun->decl);
884 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
885 return true;
887 if (!check_retval_uses (retval, ret_stmt))
888 DUMP_AND_RETURN("Return value has uses outside return stmt"
889 " and comparisons against 0.")
891 gimple *def = SSA_NAME_DEF_STMT (retval);
893 if (gcall *call_stmt = dyn_cast<gcall *> (def))
895 tree callee_decl = gimple_call_fndecl (call_stmt);
896 if (!callee_decl)
897 return false;
899 if (!ipa && !DECL_IS_MALLOC (callee_decl))
900 DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
901 " non-ipa mode.")
903 cgraph_edge *cs = node->get_edge (call_stmt);
904 if (cs)
906 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
907 es->is_return_callee_uncaptured = true;
911 else if (gphi *phi = dyn_cast<gphi *> (def))
913 bool all_args_zero = true;
914 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
916 tree arg = gimple_phi_arg_def (phi, i);
917 if (integer_zerop (arg))
918 continue;
920 all_args_zero = false;
921 if (TREE_CODE (arg) != SSA_NAME)
922 DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
923 if (!check_retval_uses (arg, phi))
924 DUMP_AND_RETURN ("phi arg has uses outside phi"
925 " and comparisons against 0.")
927 gimple *arg_def = SSA_NAME_DEF_STMT (arg);
928 if (is_a<gphi *> (arg_def))
930 if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
931 DUMP_AND_RETURN ("nested phi fail")
932 continue;
935 gcall *call_stmt = dyn_cast<gcall *> (arg_def);
936 if (!call_stmt)
937 DUMP_AND_RETURN ("phi arg is a not a call_stmt.")
939 tree callee_decl = gimple_call_fndecl (call_stmt);
940 if (!callee_decl)
941 return false;
942 if (!ipa && !DECL_IS_MALLOC (callee_decl))
943 DUMP_AND_RETURN("callee_decl does not have malloc attribute"
944 " for non-ipa mode.")
946 cgraph_edge *cs = node->get_edge (call_stmt);
947 if (cs)
949 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
950 es->is_return_callee_uncaptured = true;
954 if (all_args_zero)
955 DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
958 else
959 DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")
961 return true;
964 static bool
965 malloc_candidate_p (function *fun, bool ipa)
967 basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
968 edge e;
969 edge_iterator ei;
970 cgraph_node *node = cgraph_node::get_create (fun->decl);
972 if (EDGE_COUNT (exit_block->preds) == 0
973 || !flag_delete_null_pointer_checks)
974 return false;
976 auto_bitmap visited;
977 FOR_EACH_EDGE (e, ei, exit_block->preds)
979 gimple_stmt_iterator gsi = gsi_last_bb (e->src);
980 greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));
982 if (!ret_stmt)
983 return false;
985 tree retval = gimple_return_retval (ret_stmt);
986 if (!retval)
987 DUMP_AND_RETURN("No return value.")
989 if (TREE_CODE (retval) != SSA_NAME
990 || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
991 DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")
993 if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
994 return false;
997 if (dump_file && (dump_flags & TDF_DETAILS))
998 fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
999 IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
1000 return true;
1003 #undef DUMP_AND_RETURN
1005 /* This is the main routine for finding the reference patterns for
1006 global variables within a function FN. */
1008 static funct_state
1009 analyze_function (struct cgraph_node *fn, bool ipa)
1011 tree decl = fn->decl;
1012 funct_state l;
1013 basic_block this_block;
1015 l = XCNEW (class funct_state_d);
1016 l->pure_const_state = IPA_CONST;
1017 l->state_previously_known = IPA_NEITHER;
1018 l->looping_previously_known = true;
1019 l->looping = false;
1020 l->can_throw = false;
1021 l->can_free = false;
1022 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
1023 flags_from_decl_or_type (fn->decl),
1024 fn->cannot_return_p ());
1026 if (fn->thunk.thunk_p || fn->alias)
1028 /* Thunk gets propagated through, so nothing interesting happens. */
1029 gcc_assert (ipa);
1030 if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
1031 l->pure_const_state = IPA_NEITHER;
1032 return l;
1035 if (dump_file)
1037 fprintf (dump_file, "\n\n local analysis of %s\n ",
1038 fn->dump_name ());
1041 push_cfun (DECL_STRUCT_FUNCTION (decl));
1043 FOR_EACH_BB_FN (this_block, cfun)
1045 gimple_stmt_iterator gsi;
1046 struct walk_stmt_info wi;
1048 memset (&wi, 0, sizeof (wi));
1049 for (gsi = gsi_start_bb (this_block);
1050 !gsi_end_p (gsi);
1051 gsi_next (&gsi))
1053 check_stmt (&gsi, l, ipa);
1054 if (l->pure_const_state == IPA_NEITHER
1055 && l->looping
1056 && l->can_throw
1057 && l->can_free)
1058 goto end;
1062 end:
1063 if (l->pure_const_state != IPA_NEITHER)
1065 /* Const functions cannot have back edges (an
1066 indication of possible infinite loop side
1067 effect. */
1068 if (mark_dfs_back_edges ())
1070 /* Preheaders are needed for SCEV to work.
1071 Simple latches and recorded exits improve chances that loop will
1072 proved to be finite in testcases such as in loop-15.c
1073 and loop-24.c */
1074 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
1075 | LOOPS_HAVE_SIMPLE_LATCHES
1076 | LOOPS_HAVE_RECORDED_EXITS);
1077 if (dump_file && (dump_flags & TDF_DETAILS))
1078 flow_loops_dump (dump_file, NULL, 0);
1079 if (mark_irreducible_loops ())
1081 if (dump_file)
1082 fprintf (dump_file, " has irreducible loops\n");
1083 l->looping = true;
1085 else
1087 class loop *loop;
1088 scev_initialize ();
1089 FOR_EACH_LOOP (loop, 0)
1090 if (!finite_loop_p (loop))
1092 if (dump_file)
1093 fprintf (dump_file, " cannot prove finiteness of "
1094 "loop %i\n", loop->num);
1095 l->looping =true;
1096 break;
1098 scev_finalize ();
1100 loop_optimizer_finalize ();
1104 if (dump_file && (dump_flags & TDF_DETAILS))
1105 fprintf (dump_file, " checking previously known:");
1107 better_state (&l->pure_const_state, &l->looping,
1108 l->state_previously_known,
1109 l->looping_previously_known);
1110 if (TREE_NOTHROW (decl))
1111 l->can_throw = false;
1113 l->malloc_state = STATE_MALLOC_BOTTOM;
1114 if (DECL_IS_MALLOC (decl))
1115 l->malloc_state = STATE_MALLOC;
1116 else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
1117 l->malloc_state = STATE_MALLOC_TOP;
1118 else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
1119 l->malloc_state = STATE_MALLOC;
1121 pop_cfun ();
1122 if (dump_file)
1124 if (l->looping)
1125 fprintf (dump_file, "Function is locally looping.\n");
1126 if (l->can_throw)
1127 fprintf (dump_file, "Function is locally throwing.\n");
1128 if (l->pure_const_state == IPA_CONST)
1129 fprintf (dump_file, "Function is locally const.\n");
1130 if (l->pure_const_state == IPA_PURE)
1131 fprintf (dump_file, "Function is locally pure.\n");
1132 if (l->can_free)
1133 fprintf (dump_file, "Function can locally free.\n");
1134 if (l->malloc_state == STATE_MALLOC)
1135 fprintf (dump_file, "Function is locally malloc.\n");
1137 return l;
1140 void
1141 funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
1143 /* There are some shared nodes, in particular the initializers on
1144 static declarations. We do not need to scan them more than once
1145 since all we would be interested in are the addressof
1146 operations. */
1147 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1149 funct_state_d *a = analyze_function (node, true);
1150 new (state) funct_state_d (*a);
1151 free (a);
1155 /* Called when new clone is inserted to callgraph late. */
1157 void
1158 funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
1159 funct_state_d *src_data,
1160 funct_state_d *dst_data)
1162 new (dst_data) funct_state_d (*src_data);
1163 if (dst_data->malloc_state == STATE_MALLOC
1164 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
1165 dst_data->malloc_state = STATE_MALLOC_BOTTOM;
1169 void
1170 pass_ipa_pure_const::
1171 register_hooks (void)
1173 if (init_p)
1174 return;
1176 init_p = true;
1178 funct_state_summaries = new funct_state_summary_t (symtab);
1182 /* Analyze each function in the cgraph to see if it is locally PURE or
1183 CONST. */
1185 static void
1186 pure_const_generate_summary (void)
1188 struct cgraph_node *node;
1190 pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
1191 pass->register_hooks ();
1193 /* Process all of the functions.
1195 We process AVAIL_INTERPOSABLE functions. We cannot use the results
1196 by default, but the info can be used at LTO with -fwhole-program or
1197 when function got cloned and the clone is AVAILABLE. */
1199 FOR_EACH_DEFINED_FUNCTION (node)
1200 if (opt_for_fn (node->decl, flag_ipa_pure_const))
1202 funct_state_d *a = analyze_function (node, true);
1203 new (funct_state_summaries->get_create (node)) funct_state_d (*a);
1204 free (a);
/* Serialize the ipa info for lto.  Writes one LTO section containing, for
   every defined function in the partition that has a summary, a node
   reference followed by a bitpack of its pure/const state.  The reader
   (pure_const_read_summary) must unpack in exactly this order.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  /* First pass: count the records so the reader knows how many to expect.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && funct_state_summaries->exists (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      funct_state_d *fs = funct_state_summaries->get (node);
      if (node->definition && fs != NULL)
	{
	  struct bitpack_d bp;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
/* Deserialize the ipa info for lto.  Mirror image of
   pure_const_write_summary: for each input file, read the record count and
   then each node reference plus its bitpacked state, unpacking fields in
   the same order they were packed.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      /* A file may have no section of this kind; skip it.  */
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));

	      fs = funct_state_summaries->get_create (node);
	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
			= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      /* Dump what was read, together with the static flags already
		 present on the decl, for debugging the streaming.  */
	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file,"  function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file,"  function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file,"  function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file,"  function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
1355 /* We only propagate across edges that can throw externally and their callee
1356 is not interposable. */
1358 static bool
1359 ignore_edge_for_nothrow (struct cgraph_edge *e)
1361 if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
1362 return true;
1364 enum availability avail;
1365 cgraph_node *ultimate_target
1366 = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1367 if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
1368 return true;
1369 return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
1370 && !e->callee->binds_to_current_def_p (e->caller))
1371 || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1372 || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
1375 /* Return true if NODE is self recursive function.
1376 Indirectly recursive functions appears as non-trivial strongly
1377 connected components, so we need to care about self recursion
1378 only. */
1380 static bool
1381 self_recursive_p (struct cgraph_node *node)
1383 struct cgraph_edge *e;
1384 for (e = node->callees; e; e = e->next_callee)
1385 if (e->callee->function_symbol () == node)
1386 return true;
1387 return false;
1390 /* Return true if N is cdtor that is not const or pure. In this case we may
1391 need to remove unreachable function if it is marked const/pure. */
1393 static bool
1394 cdtor_p (cgraph_node *n, void *)
1396 if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
1397 return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
1398 || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
1399 return false;
1402 /* Skip edges from and to nodes without ipa_pure_const enabled.
1403 Ignore not available symbols. */
1405 static bool
1406 ignore_edge_for_pure_const (struct cgraph_edge *e)
1408 enum availability avail;
1409 cgraph_node *ultimate_target
1410 = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
1412 return (avail <= AVAIL_INTERPOSABLE
1413 || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
1414 || !opt_for_fn (ultimate_target->decl,
1415 flag_ipa_pure_const));
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  Returns true if some const/pure cdtor became removable
   (caller should schedule TODO_remove_functions).  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;
  bool has_cdtor;

  /* Collapse SCCs so each cycle is visited once, leaves first.  */
  order_pos = ipa_reduced_postorder (order, true,
				     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s state:%s looping %i\n",
		     w->dump_name (),
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.
	     We are safe to pass NULL as FROM and TO because we will take care
	     of possible interposition when walking callees.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping,
		       NULL, NULL);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
	       e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "    Call to %s",
			   e->callee->dump_name ());
		}
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  /* Non-interposable callee: use its computed summary.  */
		  funct_state y_l = funct_state_summaries->get_create (y);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  /* Side effects of a call that cannot return (e.g. calls
		     abort) do not matter for the caller's result; treat it
		     as pure but looping.  */
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					      y->decl))
		/* Builtin with known semantics; edge_state already set.  */
		;
	      else
		/* Interposable callee: fall back to declared ECF flags.  */
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, e->caller, e->callee);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* Now process the indirect call.  */
	  for (ie = w->indirect_calls;
	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Indirect call");
	      state_from_flags (&edge_state, &edge_looping,
				ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref)
	       && pure_const_state != IPA_NEITHER; i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  /* A store on a path that never returns is harmless.  */
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = funct_state_summaries->get (w);

	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = funct_state_summaries->get (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->dump_name ());

	  /* Never lose information that was previously known (e.g. from
	     attributes or an earlier run).  */
	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->dump_name ());
		  }
		/* Turning constructor or destructor to non-looping const/pure
		   enables us to possibly remove the function completely.  */
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_const_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->dump_name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->dump_name ());
		  }
		if (this_looping)
		  has_cdtor = false;
		else
		  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
							      NULL, true);
		if (w->set_pure_flag (true, this_looping))
		  {
		    if (dump_file)
		      fprintf (dump_file,
			       "Declaration updated to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->dump_name ());
		    remove_p |= has_cdtor;
		  }
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  /* Collapse SCCs so each cycle is visited once, leaves first.  */
  order_pos = ipa_reduced_postorder (order, true,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = funct_state_summaries->get_create (w);

	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				   function_or_virtual_thunk_symbol (&avail,
								     e->caller);

		  /* We can use info about the callee only if we know it
		     cannot be interposed.
		     When callee is compiled with non-call exceptions we also
		     must check that the declaration is bound to current
		     body as other semantically equivalent body may still
		     throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (funct_state_summaries->get_create (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      /* Indirect calls throw unless the call site itself is marked
		 nothrow.  */
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->dump_name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
1862 /* Debugging function to dump state of malloc lattice. */
1864 DEBUG_FUNCTION
1865 static void
1866 dump_malloc_lattice (FILE *dump_file, const char *s)
1868 if (!dump_file)
1869 return;
1871 fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
1872 cgraph_node *node;
1873 FOR_EACH_FUNCTION (node)
1875 funct_state fs = funct_state_summaries->get (node);
1876 if (fs)
1877 fprintf (dump_file, "%s: %s\n", node->dump_name (),
1878 malloc_state_names[fs->malloc_state]);
1882 /* Propagate malloc attribute across the callgraph. */
1884 static void
1885 propagate_malloc (void)
1887 cgraph_node *node;
1888 FOR_EACH_FUNCTION (node)
1890 if (DECL_IS_MALLOC (node->decl))
1891 if (!funct_state_summaries->exists (node))
1893 funct_state fs = funct_state_summaries->get_create (node);
1894 fs->malloc_state = STATE_MALLOC;
1898 dump_malloc_lattice (dump_file, "Initial");
1899 struct cgraph_node **order
1900 = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
1901 int order_pos = ipa_reverse_postorder (order);
1902 bool changed = true;
1904 while (changed)
1906 changed = false;
1907 /* Walk in postorder. */
1908 for (int i = order_pos - 1; i >= 0; --i)
1910 cgraph_node *node = order[i];
1911 if (node->alias
1912 || !node->definition
1913 || !funct_state_summaries->exists (node))
1914 continue;
1916 funct_state l = funct_state_summaries->get (node);
1918 /* FIXME: add support for indirect-calls. */
1919 if (node->indirect_calls)
1921 l->malloc_state = STATE_MALLOC_BOTTOM;
1922 continue;
1925 if (node->get_availability () <= AVAIL_INTERPOSABLE)
1927 l->malloc_state = STATE_MALLOC_BOTTOM;
1928 continue;
1931 if (l->malloc_state == STATE_MALLOC_BOTTOM)
1932 continue;
1934 vec<cgraph_node *> callees = vNULL;
1935 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
1937 ipa_call_summary *es = ipa_call_summaries->get_create (cs);
1938 if (es && es->is_return_callee_uncaptured)
1939 callees.safe_push (cs->callee);
1942 malloc_state_e new_state = l->malloc_state;
1943 for (unsigned j = 0; j < callees.length (); j++)
1945 cgraph_node *callee = callees[j];
1946 if (!funct_state_summaries->exists (node))
1948 new_state = STATE_MALLOC_BOTTOM;
1949 break;
1951 malloc_state_e callee_state
1952 = funct_state_summaries->get_create (callee)->malloc_state;
1953 if (new_state < callee_state)
1954 new_state = callee_state;
1956 if (new_state != l->malloc_state)
1958 changed = true;
1959 l->malloc_state = new_state;
1964 FOR_EACH_DEFINED_FUNCTION (node)
1965 if (funct_state_summaries->exists (node))
1967 funct_state l = funct_state_summaries->get (node);
1968 if (!node->alias
1969 && l->malloc_state == STATE_MALLOC
1970 && !node->inlined_to)
1972 if (dump_file && (dump_flags & TDF_DETAILS))
1973 fprintf (dump_file, "Function %s found to be malloc\n",
1974 node->dump_name ());
1976 bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
1977 node->set_malloc_flag (true);
1978 if (!malloc_decl_p && warn_suggest_attribute_malloc)
1979 warn_function_malloc (node->decl);
1983 dump_malloc_lattice (dump_file, "after propagation");
1984 ipa_free_postorder_info ();
1985 free (order);
1988 /* Produce the global information by preforming a transitive closure
1989 on the local information that was produced by generate_summary. */
1991 unsigned int
1992 pass_ipa_pure_const::
1993 execute (function *)
1995 bool remove_p;
1997 /* Nothrow makes more function to not lead to return and improve
1998 later analysis. */
1999 propagate_nothrow ();
2000 propagate_malloc ();
2001 remove_p = propagate_pure_const ();
2003 delete funct_state_summaries;
2004 return remove_p ? TODO_remove_functions : 0;
2007 static bool
2008 gate_pure_const (void)
2010 return flag_ipa_pure_const || in_lto_p;
/* Construct the IPA pure/const pass, wiring its summary generation and
   LTO streaming hooks; INIT_P is cleared so register_hooks allocates the
   summaries lazily on first use.  */
pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p (false) {}
2026 ipa_opt_pass_d *
2027 make_pass_ipa_pure_const (gcc::context *ctxt)
2029 return new pass_ipa_pure_const (ctxt);
2032 /* Return true if function should be skipped for local pure const analysis. */
2034 static bool
2035 skip_function_for_local_pure_const (struct cgraph_node *node)
2037 /* Because we do not schedule pass_fixup_cfg over whole program after early
2038 optimizations we must not promote functions that are called by already
2039 processed functions. */
2041 if (function_called_by_processed_nodes_p ())
2043 if (dump_file)
2044 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
2045 return true;
2047 /* Save some work and do not analyze functions which are interposable and
2048 do not have any non-interposable aliases. */
2049 if (node->get_availability () <= AVAIL_INTERPOSABLE
2050 && !node->has_aliases_p ())
2052 if (dump_file)
2053 fprintf (dump_file,
2054 "Function is interposable; not analyzing.\n");
2055 return true;
2057 return false;
2060 /* Simple local pass for pure const discovery reusing the analysis from
2061 ipa_pure_const. This pass is effective when executed together with
2062 other optimization passes in early optimization pass queue. */
2064 namespace {
/* Pass metadata for the local (per-function GIMPLE) pure/const pass.  */
const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* GIMPLE pass wrapper for the local pure/const discovery; the actual work
   is done in pass_local_pure_const::execute below.  */
class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  /* Cloneable so it can appear several times in the pass queue.  */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
/* Run the local pure/const analysis on FUN and update the declaration
   flags (noreturn, const/pure, nothrow, malloc) that can be proven from
   the body alone.  Returns fixup-cfg TODOs when anything changed.  */

unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  /* When skipping, still analyze if a -Wsuggest-attribute warning may be
     emitted; otherwise there is nothing to do.  */
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      /* Only update the declaration when not skipping.  */
      if (!skip && node->set_const_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      if (!skip && node->set_pure_flag (true, l->looping))
	{
	  if (dump_file)
	    fprintf (dump_file, "Declaration updated to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	  changed = true;
	}
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
	warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be malloc: %s\n",
		 node->dump_name ());
    }

  /* analyze_function heap-allocated the state.  */
  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
2213 } // anon namespace
2215 gimple_opt_pass *
2216 make_pass_local_pure_const (gcc::context *ctxt)
2218 return new pass_local_pure_const (ctxt);
2221 /* Emit noreturn warnings. */
2223 namespace {
/* Pass metadata for the noreturn-suggestion warning pass.  */
const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* Per-function pass that emits -Wsuggest-attribute=noreturn warnings:
   if the current function never reaches its exit block, suggest adding
   the noreturn attribute.  */
2238 class pass_warn_function_noreturn : public gimple_opt_pass
2240 public:
2241 pass_warn_function_noreturn (gcc::context *ctxt)
2242 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2245 /* opt_pass methods: */
/* Run only when the user enabled -Wsuggest-attribute=noreturn.  */
2246 virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
2247 virtual unsigned int execute (function *fun)
/* Warn when the function is not already marked noreturn
   (NOTE(review): TREE_THIS_VOLATILE on a FUNCTION_DECL presumably
   encodes the noreturn bit — confirm against tree.h) and the exit
   block has no incoming edges, i.e. no path in the CFG returns.
   Returns no TODO flags.  */
2249 if (!TREE_THIS_VOLATILE (current_function_decl)
2250 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2251 warn_function_noreturn (current_function_decl);
2252 return 0;
2257 } // anon namespace
/* Factory hooked into the pass manager: allocate a fresh
   pass_warn_function_noreturn instance bound to context CTXT.  The
   pass framework takes ownership of the returned object.  */
2259 gimple_opt_pass *
2260 make_pass_warn_function_noreturn (gcc::context *ctxt)
2262 return new pass_warn_function_noreturn (ctxt);
2265 /* Simple local pass that discovers nothrow functions, reusing the
2266 analysis machinery of ipa_pure_const. This pass is effective when
2267 executed together with other optimization passes in the early
optimization pass queue. */
2269 namespace {
/* Pass descriptor for the local nothrow-discovery pass.  It shares
   the IPA pure/const timevar (TV_IPA_PURE_CONST), has no property or
   TODO requirements of its own, and is dumpable under the name
   "nothrow".  */
2271 const pass_data pass_data_nothrow =
2273 GIMPLE_PASS, /* type */
2274 "nothrow", /* name */
2275 OPTGROUP_NONE, /* optinfo_flags */
2276 TV_IPA_PURE_CONST, /* tv_id */
2277 0, /* properties_required */
2278 0, /* properties_provided */
2279 0, /* properties_destroyed */
2280 0, /* todo_flags_start */
2281 0, /* todo_flags_finish */
/* Per-function pass that marks functions nothrow when no statement in
   their body can throw externally.  The execute method is defined
   out-of-line below.  */
2284 class pass_nothrow : public gimple_opt_pass
2286 public:
2287 pass_nothrow (gcc::context *ctxt)
2288 : gimple_opt_pass (pass_data_nothrow, ctxt)
2291 /* opt_pass methods: */
/* clone lets the pass manager schedule this pass more than once in
   the pass queue.  */
2292 opt_pass * clone () { return new pass_nothrow (m_ctxt); }
/* Only worth running when optimizing.  */
2293 virtual bool gate (function *) { return optimize; }
2294 virtual unsigned int execute (function *);
2298 unsigned int
2299 pass_nothrow::execute (function *)
2301 struct cgraph_node *node;
2302 basic_block this_block;
2304 if (TREE_NOTHROW (current_function_decl))
2305 return 0;
2307 node = cgraph_node::get (current_function_decl);
2309 /* We run during lowering, we cannot really use availability yet. */
2310 if (cgraph_node::get (current_function_decl)->get_availability ()
2311 <= AVAIL_INTERPOSABLE)
2313 if (dump_file)
2314 fprintf (dump_file, "Function is interposable;"
2315 " not analyzing.\n");
2316 return true;
2319 FOR_EACH_BB_FN (this_block, cfun)
2321 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2322 !gsi_end_p (gsi);
2323 gsi_next (&gsi))
2324 if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
2326 if (is_gimple_call (gsi_stmt (gsi)))
2328 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2329 if (callee_t && recursive_call_p (current_function_decl,
2330 callee_t))
2331 continue;
2334 if (dump_file)
2336 fprintf (dump_file, "Statement can throw: ");
2337 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
2339 return 0;
2343 node->set_nothrow_flag (true);
2345 bool cfg_changed = false;
2346 if (self_recursive_p (node))
2347 FOR_EACH_BB_FN (this_block, cfun)
2348 if (gimple *g = last_stmt (this_block))
2349 if (is_gimple_call (g))
2351 tree callee_t = gimple_call_fndecl (g);
2352 if (callee_t
2353 && recursive_call_p (current_function_decl, callee_t)
2354 && maybe_clean_eh_stmt (g)
2355 && gimple_purge_dead_eh_edges (this_block))
2356 cfg_changed = true;
2359 if (dump_file)
2360 fprintf (dump_file, "Function found to be nothrow: %s\n",
2361 current_function_name ());
2362 return cfg_changed ? TODO_cleanup_cfg : 0;
2365 } // anon namespace
/* Factory hooked into the pass manager: allocate a fresh pass_nothrow
   instance bound to context CTXT.  The pass framework takes ownership
   of the returned object.  */
2367 gimple_opt_pass *
2368 make_pass_nothrow (gcc::context *ctxt)
2370 return new pass_nothrow (ctxt);