/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2024 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not prone
   to this problem since they are by definition transitive.  */
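/* Illustrative examples (added commentary, not from the original
   sources): given

     int global;
     int f (int x) { return x + 1; }    // reads no global memory: const
     int g (void) { return global; }    // only reads memory: pure
     void h (void) { global = 1; }      // writes memory: neither

   this pass would mark f as TREE_READONLY and g as DECL_PURE_P, while
   h gets neither flag.  */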
/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
#include "ssa.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "sreal.h"
#include "ipa-cp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "symtab-thunks.h"
#include "dbgcnt.h"
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

static const char *pure_const_names[3] = {"const", "pure", "neither"};

enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};
/* Holder for the const_state.  There is one of these per function
   decl.  */
class funct_state_d
{
public:
  funct_state_d (): pure_const_state (IPA_NEITHER),
    state_previously_known (IPA_NEITHER), looping_previously_known (true),
    looping (true), can_throw (true), can_free (true),
    malloc_state (STATE_MALLOC_BOTTOM) {}

  funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
    state_previously_known (s.state_previously_known),
    looping_previously_known (s.looping_previously_known),
    looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
    malloc_state (s.malloc_state) {}

  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user set here; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly loop infinitely.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could loop infinitely since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;

  enum malloc_state_e malloc_state;
};
typedef class funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

class funct_state_summary_t:
  public fast_function_summary <funct_state_d *, va_heap>
{
public:
  funct_state_summary_t (symbol_table *symtab):
    fast_function_summary <funct_state_d *, va_heap> (symtab) {}

  void insert (cgraph_node *, funct_state_d *state) final override;
  void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
		  funct_state_d *src_data,
		  funct_state_d *dst_data) final override;
};

static funct_state_summary_t *funct_state_summaries = NULL;

static bool gate_pure_const (void);
namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const (gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) final override { return gate_pure_const (); }
  unsigned int execute (function *fun) final override;

  void register_hooks (void);

private:
  bool init_p;
}; // class pass_ipa_pure_const

} // anon namespace
/* Try to guess if the function body will always be visible to the
   compiler when compiling the call and whether the compiler will be
   able to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
	  || DECL_COMDAT (decl));
}
/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION, this function may initialize it and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  if (!option_enabled (option, lang_hooks.option_lang_mask (), &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? G_("function might be candidate for attribute %qs")
	      : G_("function might be candidate for attribute %qs"
		   " if it is known to return normally"), attrib_name);
  return warned_about;
}
/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  /* Declaring a void function pure makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about, "pure");
}

/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  /* Declaring a void function const makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
			 known_finite, warned_about, "const");
}
/* Emit suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
			 true, warned_about, "malloc");
}

/* Emit suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
			   true, warned_about, "noreturn");
}

void
warn_function_cold (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
			 true, warned_about, "cold");
}

void
warn_function_returns_nonnull (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_returns_nonnull, decl,
			 true, warned_about, "returns_nonnull");
}
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, " global memory read is not const\n");
	  /* Just a regular read.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, " static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (refs_local_or_readonly_memory_p (t))
    {
      if (dump_file)
	fprintf (dump_file, " Indirect ref to local or readonly "
		 "memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
	fprintf (dump_file, " Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
/* Compute the state based on ECF FLAGS and store to STATE and LOOPING.  */
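/* For example (illustrative, not from the original sources): FLAGS of
   ECF_PURE | ECF_LOOPING_CONST_OR_PURE yield *STATE == IPA_PURE with
   *LOOPING true, while FLAGS of 0 on a call that may return yield
   IPA_NEITHER with *LOOPING true.  */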
static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
		  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING the better of the two variants.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */
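/* For example (illustrative, not from the original sources): merging
   *STATE == IPA_NEITHER, *LOOPING == false with STATE2 == IPA_PURE,
   LOOPING2 == true picks the better state IPA_PURE and, because the
   old state was IPA_NEITHER, takes LOOPING2, giving *LOOPING == true.  */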
static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING the worse of the two variants.
   N is the actual node called.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2,
	     struct symtab_node *from,
	     struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     Now this function will be detected as CONST; however, when interposed
     it may end up being just pure.  We must always assume the worst
     scenario here.  */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Dropping state to PURE because call to %s may not "
		 "bind to current def.\n", to->dump_name ());
      state2 = IPA_PURE;
    }
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
/* Recognize special cases of builtins that are by themselves not const
   but a function using them is.  */
bool
builtin_safe_for_const_function_p (bool *looping, tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
      case BUILT_IN_DWARF_CFA:
      case BUILT_IN_RETURN_ADDRESS:
	*looping = false;
	return true;
      case BUILT_IN_PREFETCH:
	*looping = true;
	return true;
      default:
	break;
      }
  return false;
}
/* Check the parameters of the function call CALL to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also affect the purity.  CALL is the call
   statement for the entire call expression.  */
static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (cfun, call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (cfun, call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
	if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, " operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, " operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee (such as for some of the builtins), we will use
     them, otherwise we will compute our own information.

     Const and pure functions have fewer clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (builtin_safe_for_const_function_p (&call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       IPA_CONST, call_looping,
		       NULL, NULL);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, " setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file,
		       " longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
	fprintf (dump_file, " Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either the callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, " can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, " callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping, NULL, NULL);
    }
  /* Direct function calls are handled by IPA propagation.  */
}
/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}
/* Look at the statement pointed to by GSIP and figure out what
   interesting side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobbers as side effects before IPA, so we rather inline
     C++ destructors and keep clobber semantics than eliminate them.

     Similar logic is in ipa-modref.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers be optimized more.  This is a
     common case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, " Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (cfun, stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (cfun, stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, " can throw externally\n");
	  local->can_throw = true;
	}
      else
	if (dump_file)
	  fprintf (dump_file, " can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, " nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, " memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, " volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      return;
    default:
      break;
    }
}
/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is the return value of the function and STMT is the return
   stmt.  */
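/* A GIMPLE sketch of what passes this check (illustrative, not from
   the original sources):

     retval_1 = foo ();
     if (retval_1 == 0)
       goto <bb 4>;
     ...
     return retval_1;

   Here retval_1 is used only by a comparison against 0 and by the
   return statement itself, so the function returns true.  */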
static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
	tree op2 = gimple_cond_rhs (cond);
	if (!integer_zerop (op2))
	  return false;
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
	enum tree_code code = gimple_assign_rhs_code (ga);
	if (TREE_CODE_CLASS (code) != tcc_comparison)
	  return false;
	if (!integer_zerop (gimple_assign_rhs2 (ga)))
	  return false;
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      return false;

  return true;
}
/* malloc_candidate_p () checks if FUN can possibly be annotated with the
   malloc attribute.  Currently this function does a very conservative
   analysis.  FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and every element of the phi is either NULL or
      SSA_NAME_DEF_STMT (element) is a function call.
   3) The return value has immediate uses only within comparisons (gcond
      or gassign) and the return stmt (and likewise a phi arg has
      immediate use only within a comparison or the phi stmt).  */
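/* An illustrative candidate (example, not from the original sources):

     void *
     my_alloc (size_t n)
     {
       void *p = malloc (n);
       if (p == 0)
	 exit (1);
       return p;
     }

   The return value comes directly from a call to a malloc-attributed
   function and its only other use is a comparison against 0, so all
   three conditions above hold.  */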
#define DUMP_AND_RETURN(reason)  \
{  \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
	     (node->dump_name ()), (reason));  \
  return false;  \
}
static bool
malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
		      bitmap visited)
{
  cgraph_node *node = cgraph_node::get_create (fun->decl);
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
    return true;

  if (!check_retval_uses (retval, ret_stmt))
    DUMP_AND_RETURN("Return value has uses outside return stmt"
		    " and comparisons against 0.")

  gimple *def = SSA_NAME_DEF_STMT (retval);

  if (gcall *call_stmt = dyn_cast<gcall *> (def))
    {
      tree callee_decl = gimple_call_fndecl (call_stmt);
      if (!callee_decl)
	return false;

      if (!ipa && !DECL_IS_MALLOC (callee_decl))
	DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
			" non-ipa mode.")

      cgraph_edge *cs = node->get_edge (call_stmt);
      if (cs)
	{
	  ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	  es->is_return_callee_uncaptured = true;
	}
    }

  else if (gphi *phi = dyn_cast<gphi *> (def))
    {
      bool all_args_zero = true;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	{
	  tree arg = gimple_phi_arg_def (phi, i);
	  if (integer_zerop (arg))
	    continue;

	  all_args_zero = false;
	  if (TREE_CODE (arg) != SSA_NAME)
	    DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
	  if (!check_retval_uses (arg, phi))
	    DUMP_AND_RETURN ("phi arg has uses outside phi"
			     " and comparisons against 0.")

	  gimple *arg_def = SSA_NAME_DEF_STMT (arg);
	  if (is_a<gphi *> (arg_def))
	    {
	      if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
		DUMP_AND_RETURN ("nested phi fail")
	      continue;
	    }

	  gcall *call_stmt = dyn_cast<gcall *> (arg_def);
	  if (!call_stmt)
	    DUMP_AND_RETURN ("phi arg is not a call_stmt.")

	  tree callee_decl = gimple_call_fndecl (call_stmt);
	  if (!callee_decl)
	    return false;
	  if (!ipa && !DECL_IS_MALLOC (callee_decl))
	    DUMP_AND_RETURN("callee_decl does not have malloc attribute"
			    " for non-ipa mode.")

	  cgraph_edge *cs = node->get_edge (call_stmt);
	  if (cs)
	    {
	      ipa_call_summary *es = ipa_call_summaries->get_create (cs);
	      es->is_return_callee_uncaptured = true;
	    }
	}

      if (all_args_zero)
	DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
    }

  else
    DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")

  return true;
}
static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

  if (EDGE_COUNT (exit_block->preds) == 0
      || !flag_delete_null_pointer_checks)
    return false;

  auto_bitmap visited;
  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
	return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
	DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
	  || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
	DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
	return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
	     IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;
}

#undef DUMP_AND_RETURN
/* Return true if the function is known to be finite.  */

bool
finite_function_p ()
{
  /* Const functions cannot have back edges (an
     indication of a possible infinite loop side
     effect).  */
  bool finite = true;
  if (mark_dfs_back_edges ())
    {
      /* Preheaders are needed for SCEV to work.
	 Simple latches and recorded exits improve chances that the loop
	 will be proved to be finite in testcases such as in loop-15.c
	 and loop-24.c.  */
      loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			   | LOOPS_HAVE_SIMPLE_LATCHES
			   | LOOPS_HAVE_RECORDED_EXITS);
      if (dump_file && (dump_flags & TDF_DETAILS))
	flow_loops_dump (dump_file, NULL, 0);
      if (mark_irreducible_loops ())
	{
	  if (dump_file)
	    fprintf (dump_file, " has irreducible loops\n");
	  finite = false;
	}
      else
	{
	  scev_initialize ();
	  for (auto loop : loops_list (cfun, 0))
	    if (!finite_loop_p (loop))
	      {
		if (dump_file)
		  fprintf (dump_file, " cannot prove finiteness of "
			   "loop %i\n", loop->num);
		finite = false;
		break;
	      }
	  scev_finalize ();
	}
      loop_optimizer_finalize ();
    }
  return finite;
}
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (class funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
	       fn->dump_name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  /* NULL memory accesses terminate the BB.  These accesses are known
	     to trip undefined behaviour.  gimple-ssa-isolate-paths turns them
	     into volatile accesses and adds a builtin_trap call which would
	     confuse us otherwise.  */
	  if (infer_nonnull_range_by_dereference (gsi_stmt (gsi),
						  null_pointer_node))
	    {
	      if (dump_file)
		fprintf (dump_file, " NULL memory access; terminating BB%s\n",
			 flag_non_call_exceptions ? "; looping" : "");
	      if (flag_non_call_exceptions)
		{
		  l->looping = true;
		  if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
		    {
		      if (dump_file)
			fprintf (dump_file, " can throw externally\n");
		      l->can_throw = true;
		    }
		}
	      break;
	    }
	  check_stmt (&gsi, l, ipa);
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER
      && !l->looping
      && !finite_function_p ())
    l->looping = true;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, " checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
	fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
	fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
	fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
	fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
	fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}
void
funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
{
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  if (opt_for_fn (node->decl, flag_ipa_pure_const))
    {
      funct_state_d *a = analyze_function (node, true);
      new (state) funct_state_d (*a);
      free (a);
    }
  else
    /* Do not keep stale summaries.  */
    funct_state_summaries->remove (node);
}

/* Called when a new clone is inserted into the callgraph late.  */

void
funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
				  funct_state_d *src_data,
				  funct_state_d *dst_data)
{
  new (dst_data) funct_state_d (*src_data);
  if (dst_data->malloc_state == STATE_MALLOC
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
    dst_data->malloc_state = STATE_MALLOC_BOTTOM;
}
void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  funct_state_summaries = new funct_state_summary_t (symtab);
}

/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when the function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      {
	funct_state_d *a = analyze_function (node, true);
	new (funct_state_summaries->get_create (node)) funct_state_d (*a);
	free (a);
      }
}
/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && funct_state_summaries->exists (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      funct_state_d *fs = funct_state_summaries->get (node);
      if (node->definition && fs != NULL)
	{
	  struct bitpack_d bp;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  bp_pack_value (&bp, fs->malloc_state, 2);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
									index));

	      fs = funct_state_summaries->get_create (node);
	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
		= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
		= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);
	      fs->malloc_state
		= (enum malloc_state_e) bp_unpack_value (&bp, 2);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s ", node->dump_name ());
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, " previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file, " function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file, " function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file, " function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file, " function can locally free\n");
		  fprintf (dump_file, "\n malloc state: %s\n",
			   malloc_state_names[fs->malloc_state]);
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
/* We only propagate across edges that can throw externally and whose
   callee is not interposable.  */

static bool
ignore_edge_for_nothrow (struct cgraph_edge *e)
{
  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
    return true;

  enum availability avail;
  cgraph_node *ultimate_target
    = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
    return true;
  return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
	   && !e->callee->binds_to_current_def_p (e->caller))
	  || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
	  || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
}
/* Return true if NODE is a self-recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */
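/* For illustration (example, not from the original sources):

     int f (int n) { return n ? f (n - 1) : 0; }

   calls itself directly, so self_recursive_p returns true.  Mutual
   recursion (f calls g, g calls f) instead forms a non-trivial SCC
   and is handled by the cycle collapsing in the propagators.  */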
static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}
/* Return true if N is a cdtor that is not const or pure.  In this case we
   may need to remove an unreachable function if it is marked const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
	    || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
  return false;
}
/* Skip edges from and to nodes without ipa_pure_const enabled.
   Ignore unavailable symbols.  */

static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
  enum availability avail;
  cgraph_node *ultimate_target
    = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);

  return (avail <= AVAIL_INTERPOSABLE
	  || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
	  || !opt_for_fn (ultimate_target->decl,
			  flag_ipa_pure_const));
}
/* Return true if the function should be skipped for local pure-const
   analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over the whole program after
     early optimizations we must not promote functions that are called by
     already processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  /* Save some work and do not analyze functions which are interposable and
     do not have any non-interposable aliases.  */
  if (node->get_availability () <= AVAIL_INTERPOSABLE
      && !flag_lto
      && !node->has_aliases_p ())
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function is interposable; not analyzing.\n");
      return true;
    }
  return false;
}
/* Make the function const and output a warning.  If LOCAL is true,
   return true if anything changed.  Otherwise return true if
   we may have introduced removable ctors.  */

bool
ipa_make_function_const (struct cgraph_node *node, bool looping, bool local)
{
  bool cdtor = false;

  if (TREE_READONLY (node->decl)
      && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl)))
    return false;
  warn_function_const (node->decl, !looping);
  if (local && skip_function_for_local_pure_const (node))
    return false;
  if (dump_file)
    fprintf (dump_file, "Function found to be %sconst: %s\n",
	     looping ? "looping " : "",
	     node->dump_name ());
  if (!local && !looping)
    cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
  if (!dbg_cnt (ipa_attr))
    return false;
  if (node->set_const_flag (true, looping))
    {
      if (dump_file)
	fprintf (dump_file,
		 "Declaration updated to be %sconst: %s\n",
		 looping ? "looping " : "",
		 node->dump_name ());
      if (local)
	return true;
      return cdtor;
    }
  return false;
}
/* Make the function pure and output a warning.  If LOCAL is true,
   return true if anything changed.  Otherwise return true if
   we may have introduced removable ctors.  */

bool
ipa_make_function_pure (struct cgraph_node *node, bool looping, bool local)
{
  bool cdtor = false;

  if (TREE_READONLY (node->decl)
      || (DECL_PURE_P (node->decl)
	  && (looping || !DECL_LOOPING_CONST_OR_PURE_P (node->decl))))
    return false;
  warn_function_pure (node->decl, !looping);
  if (local && skip_function_for_local_pure_const (node))
    return false;
  if (dump_file)
    fprintf (dump_file, "Function found to be %spure: %s\n",
	     looping ? "looping " : "",
	     node->dump_name ());
  if (!local && !looping)
    cdtor = node->call_for_symbol_and_aliases (cdtor_p, NULL, true);
  if (!dbg_cnt (ipa_attr))
    return false;
  if (node->set_pure_flag (true, looping))
    {
      if (dump_file)
	fprintf (dump_file,
		 "Declaration updated to be %spure: %s\n",
		 looping ? "looping " : "",
		 node->dump_name ());
      if (local)
	return true;
      return cdtor;
    }
  return false;
}
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;

  order_pos = ipa_reduced_postorder (order, true,
				     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " Visiting %s state:%s looping %i\n",
		     w->dump_name (),
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.
	     We are safe to pass NULL as FROM and TO because we will take care
	     of possible interposition when walking callees.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping,
		       NULL, NULL);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e && pure_const_state != IPA_NEITHER;
	       e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (e->recursive_p ())
		looping = true;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, " Call to %s",
			   e->callee->dump_name ());
		}
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = funct_state_summaries->get_create (y);

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       " state:%s looping:%i\n",
			       pure_const_names[y_l->pure_const_state],
			       y_l->looping);
		    }
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 " Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (builtin_safe_for_const_function_p (&edge_looping,
							  y->decl))
		edge_state = IPA_CONST;
	      else
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, e->caller, e->callee);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* Now process the indirect call.  */
	  for (ie = w->indirect_calls;
	       ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, " Indirect call");
	      state_from_flags (&edge_state, &edge_looping,
				ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }

	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref)
	       && pure_const_state != IPA_NEITHER; i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, " nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, " global var write\n");
		  break;
		case IPA_REF_ADDR:
		  break;
		default:
		  gcc_unreachable ();
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping, NULL, NULL);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = funct_state_summaries->get (w);

	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail,
								  e->caller);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = funct_state_summaries->get (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->dump_name ());

	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      if (this_state == IPA_NEITHER)
		this_looping = w_l->looping_previously_known;
	      this_state = w_l->state_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		remove_p |= ipa_make_function_const (w, this_looping, false);
		break;

	      case IPA_PURE:
		remove_p |= ipa_make_function_pure (w, this_looping, false);
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true,
				     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;

	  if (!TREE_NOTHROW (w->decl))
	    {
	      funct_state w_l = funct_state_summaries->get_create (w);

	      if (w_l->can_throw
		  || w->get_availability () == AVAIL_INTERPOSABLE)
		can_throw = true;

	      for (e = w->callees; e && !can_throw; e = e->next_callee)
		{
		  enum availability avail;

		  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
		    continue;

		  struct cgraph_node *y = e->callee->
				    function_or_virtual_thunk_symbol (&avail,
								      e->caller);

		  /* We can use info about the callee only if we know it
		     cannot be interposed.
		     When the callee is compiled with non-call exceptions we
		     also must check that the declaration is bound to the
		     current body, as another semantically equivalent body
		     may still throw.  */
		  if (avail <= AVAIL_INTERPOSABLE
		      || (!TREE_NOTHROW (y->decl)
			  && (funct_state_summaries->get_create (y)->can_throw
			      || (opt_for_fn (y->decl, flag_non_call_exceptions)
				  && !e->callee->binds_to_current_def_p (w)))))
		    can_throw = true;
		}
	      for (ie = w->indirect_calls; ie && !can_throw;
		   ie = ie->next_callee)
		if (ie->can_throw_external
		    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
		  can_throw = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's nothrow state, which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = funct_state_summaries->get_create (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->dump_name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
/* Debugging function to dump state of malloc lattice.  */

DEBUG_FUNCTION
static void
dump_malloc_lattice (FILE *dump_file, const char *s)
{
  if (!dump_file)
    return;

  fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      funct_state fs = funct_state_summaries->get (node);
      if (fs)
        fprintf (dump_file, "%s: %s\n", node->dump_name (),
                 malloc_state_names[fs->malloc_state]);
    }
}
/* Propagate the malloc attribute across the call graph.  */
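/* A sketch of what this propagation can prove (hypothetical user code;
   xmalloc stands in for any malloc-attributed callee):

     extern void *xmalloc (unsigned long) __attribute__ ((malloc));
     void *alloc_buf (unsigned long n) { return xmalloc (n); }

   alloc_buf returns only the uncaptured result of a malloc-like
   callee, so the fixpoint below raises it to STATE_MALLOC.  The join
   takes the lattice maximum, so a single unknown or interposable
   callee instead forces STATE_MALLOC_BOTTOM.  */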
static void
propagate_malloc (void)
{
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      if (DECL_IS_MALLOC (node->decl))
        if (!funct_state_summaries->exists (node))
          {
            funct_state fs = funct_state_summaries->get_create (node);
            fs->malloc_state = STATE_MALLOC;
          }
    }

  dump_malloc_lattice (dump_file, "Initial");
  struct cgraph_node **order
    = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos = ipa_reverse_postorder (order);
  bool changed = true;

  while (changed)
    {
      changed = false;
      /* Walk in postorder.  */
      for (int i = order_pos - 1; i >= 0; --i)
        {
          cgraph_node *node = order[i];
          if (node->alias
              || !node->definition
              || !funct_state_summaries->exists (node))
            continue;

          funct_state l = funct_state_summaries->get (node);

          /* FIXME: add support for indirect calls.  */
          if (node->indirect_calls)
            {
              l->malloc_state = STATE_MALLOC_BOTTOM;
              continue;
            }

          if (node->get_availability () <= AVAIL_INTERPOSABLE)
            {
              l->malloc_state = STATE_MALLOC_BOTTOM;
              continue;
            }

          if (l->malloc_state == STATE_MALLOC_BOTTOM)
            continue;

          auto_vec<cgraph_node *, 16> callees;
          for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
            {
              ipa_call_summary *es = ipa_call_summaries->get_create (cs);
              if (es && es->is_return_callee_uncaptured)
                callees.safe_push (cs->callee);
            }

          malloc_state_e new_state = l->malloc_state;
          for (unsigned j = 0; j < callees.length (); j++)
            {
              cgraph_node *callee = callees[j];
              if (!funct_state_summaries->exists (callee))
                {
                  new_state = STATE_MALLOC_BOTTOM;
                  break;
                }
              malloc_state_e callee_state
                = funct_state_summaries->get_create (callee)->malloc_state;
              if (new_state < callee_state)
                new_state = callee_state;
            }
          if (new_state != l->malloc_state)
            {
              changed = true;
              l->malloc_state = new_state;
            }
        }
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    if (funct_state_summaries->exists (node))
      {
        funct_state l = funct_state_summaries->get (node);
        if (!node->alias
            && l->malloc_state == STATE_MALLOC
            && !node->inlined_to
            && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
          {
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Function %s found to be malloc\n",
                       node->dump_name ());

            bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
            node->set_malloc_flag (true);
            if (!malloc_decl_p && warn_suggest_attribute_malloc)
              warn_function_malloc (node->decl);
          }
      }

  dump_malloc_lattice (dump_file, "after propagation");
  ipa_free_postorder_info ();
  free (order);
}
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

unsigned int
pass_ipa_pure_const::execute (function *)
{
  bool remove_p;

  /* Discovering nothrow functions first removes abnormal control-flow
     paths and so improves the analyses that follow.  */
  propagate_nothrow ();
  propagate_malloc ();
  remove_p = propagate_pure_const ();

  delete funct_state_summaries;
  return remove_p ? TODO_remove_functions : 0;
}
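/* Usage note (an illustration, not code in this file): the IPA pass
   above runs for ordinary optimized compilations such as

     gcc -O2 -c foo.c

   since -fipa-pure-const is enabled by default at -O1 and above, and
   it runs again at LTO link time, which is why the gate below also
   accepts in_lto_p.  */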
static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}

pass_ipa_pure_const::pass_ipa_pure_const (gcc::context *ctxt)
  : ipa_opt_pass_d (pass_data_ipa_pure_const, ctxt,
                    pure_const_generate_summary, /* generate_summary */
                    pure_const_write_summary, /* write_summary */
                    pure_const_read_summary, /* read_summary */
                    NULL, /* write_optimization_summary */
                    NULL, /* read_optimization_summary */
                    NULL, /* stmt_fixup */
                    0, /* function_transform_todo_flags_start */
                    NULL, /* function_transform */
                    NULL), /* variable_transform */
    init_p (false) {}

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}
/* Simple local pass for pure const discovery, reusing the analysis
   from ipa_pure_const.  This pass is effective when executed together
   with other optimization passes in the early optimization pass
   queue.  */

namespace {

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override
  {
    return new pass_local_pure_const (m_ctxt);
  }
  bool gate (function *) final override { return gate_pure_const (); }
  unsigned int execute (function *) final override;

}; // class pass_local_pure_const
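/* A small example of what execute below can discover (hypothetical
   user code):

     int sq (int x) { return x * x; }

   sq neither reads nor writes global memory, so it is marked const
   (TREE_READONLY) and nothrow; with -Wsuggest-attribute=const the
   pass can also suggest annotating a declaration that lacks the
   attribute.  */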
unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);

  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
        fprintf (dump_file, "Function found to be noreturn: %s\n",
                 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      if (cgraph_node::get (current_function_decl)->set_noreturn_flag (true))
        changed = true;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
        node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      changed |= ipa_make_function_const
                   (cgraph_node::get (current_function_decl), l->looping,
                    true);
      break;

    case IPA_PURE:
      changed |= ipa_make_function_pure
                   (cgraph_node::get (current_function_decl), l->looping,
                    true);
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
        fprintf (dump_file, "Function found to be nothrow: %s\n",
                 current_function_name ());
    }

  if (l->malloc_state == STATE_MALLOC
      && !DECL_IS_MALLOC (current_function_decl))
    {
      node->set_malloc_flag (true);
      if (warn_suggest_attribute_malloc)
        warn_function_malloc (node->decl);
      changed = true;
      if (dump_file)
        fprintf (dump_file, "Function found to be malloc: %s\n",
                 node->dump_name ());
    }

  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}
/* Emit noreturn warnings.  */
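/* Illustration for this warning pass (hypothetical user code, compiled
   with -Wsuggest-attribute=noreturn):

     void die (const char *msg) { fprintf (stderr, "%s\n", msg); exit (1); }

   Every path ends in a call to exit, so the function's exit block has
   no predecessors and warn_function_noreturn suggests adding
   __attribute__ ((noreturn)) to die.  */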
namespace {

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    return warn_suggest_attribute_noreturn;
  }
  unsigned int execute (function *fun) final override
  {
    if (!TREE_THIS_VOLATILE (current_function_decl)
        && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
      warn_function_noreturn (current_function_decl);
    return 0;
  }

}; // class pass_warn_function_noreturn

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}
/* Simple local pass for nothrow discovery, reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */

namespace {

const pass_data pass_data_nothrow =
{
  GIMPLE_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_nothrow : public gimple_opt_pass
{
public:
  pass_nothrow (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_nothrow, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_nothrow (m_ctxt); }
  bool gate (function *) final override { return optimize; }
  unsigned int execute (function *) final override;

}; // class pass_nothrow
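/* The self-recursion special case handled in execute below, sketched
   on hypothetical user code compiled with exceptions enabled:

     int fact (int n) { return n <= 1 ? 1 : n * fact (n - 1); }

   The recursive call is the only potentially-throwing statement; once
   fact itself is proven nothrow, that call cannot throw either, so
   the scan skips it and the now-dead EH edges are purged afterwards.  */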
unsigned int
pass_nothrow::execute (function *)
{
  struct cgraph_node *node;
  basic_block this_block;

  if (TREE_NOTHROW (current_function_decl))
    return 0;

  node = cgraph_node::get (current_function_decl);

  /* We run during lowering; we cannot really use availability yet.  */
  if (cgraph_node::get (current_function_decl)->get_availability ()
      <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
        fprintf (dump_file, "Function is interposable;"
                 " not analyzing.\n");
      return 0;
    }

  FOR_EACH_BB_FN (this_block, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
          {
            if (is_gimple_call (gsi_stmt (gsi)))
              {
                tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
                if (callee_t && recursive_call_p (current_function_decl,
                                                  callee_t))
                  continue;
              }

            if (dump_file)
              {
                fprintf (dump_file, "Statement can throw: ");
                print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
              }
            return 0;
          }
    }

  node->set_nothrow_flag (true);

  bool cfg_changed = false;
  if (self_recursive_p (node))
    FOR_EACH_BB_FN (this_block, cfun)
      if (gcall *g = safe_dyn_cast <gcall *> (*gsi_last_bb (this_block)))
        {
          tree callee_t = gimple_call_fndecl (g);
          if (callee_t
              && recursive_call_p (current_function_decl, callee_t)
              && maybe_clean_eh_stmt (g)
              && gimple_purge_dead_eh_edges (this_block))
            cfg_changed = true;
        }

  if (dump_file)
    fprintf (dump_file, "Function found to be nothrow: %s\n",
             current_function_name ());
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_nothrow (gcc::context *ctxt)
{
  return new pass_nothrow (ctxt);
}