/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2021 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made, since
   otherwise the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not prone
   to this problem since they are by definition transitive.  */

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "calls.h"
#include "cfganal.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
#include "ssa.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "symtab-thunks.h"

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

static const char *pure_const_names[3] = {"const", "pure", "neither"};

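/* For illustration (assuming no interposition and provably finite control
   flow), bodies that would settle at each lattice point:

     int sq (int x) { return x * x; }        // const: reads only its args
     int rd (const int *p) { return *p; }    // pure: reads memory
     int wr (int *p) { return ++*p; }        // neither: writes memory  */
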
enum malloc_state_e
{
  STATE_MALLOC_TOP,
  STATE_MALLOC,
  STATE_MALLOC_BOTTOM
};

static const char *malloc_state_names[] = {"malloc_top", "malloc", "malloc_bottom"};

/* Holder for the const_state.  There is one of these per function
   decl.  */
class funct_state_d
{
public:
  funct_state_d (): pure_const_state (IPA_NEITHER),
    state_previously_known (IPA_NEITHER), looping_previously_known (true),
    looping (true), can_throw (true), can_free (true),
    malloc_state (STATE_MALLOC_BOTTOM) {}

  funct_state_d (const funct_state_d &s): pure_const_state (s.pure_const_state),
    state_previously_known (s.state_previously_known),
    looping_previously_known (s.looping_previously_known),
    looping (s.looping), can_throw (s.can_throw), can_free (s.can_free),
    malloc_state (s.malloc_state) {}

  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user set here; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;

  enum malloc_state_e malloc_state;
};

typedef class funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

class funct_state_summary_t:
  public fast_function_summary <funct_state_d *, va_heap>
{
public:
  funct_state_summary_t (symbol_table *symtab):
    fast_function_summary <funct_state_d *, va_heap> (symtab) {}

  virtual void insert (cgraph_node *, funct_state_d *state);
  virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
                          funct_state_d *src_data,
                          funct_state_d *dst_data);
};

static funct_state_summary_t *funct_state_summaries = NULL;

static bool gate_pure_const (void);

namespace {

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;
}; // class pass_ipa_pure_const

} // anon namespace

/* Try to guess if function body will always be visible to compiler
   when compiling the call and whether compiler will be able
   to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl)
          || DECL_COMDAT (decl));
}

/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION, this function may initialize it and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
                   hash_set<tree> *warned_about,
                   const char * attrib_name)
{
  if (!option_enabled (option, lang_hooks.option_lang_mask (), &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
              option,
              known_finite
              ? G_("function might be candidate for attribute %qs")
              : G_("function might be candidate for attribute %qs"
                   " if it is known to return normally"), attrib_name);
  return warned_about;
}

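/* For example, when compiling with -Wsuggest-attribute=pure, an exported
   function such as

     int counter;
     int get_counter (void) { return counter; }

   may be reported as "function might be candidate for attribute 'pure'",
   provided the analysis below finds it finite and free of side effects.
   (get_counter is only an illustrative example, not part of this file.)  */
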
/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  /* Declaring a void function pure makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
                         known_finite, warned_about, "pure");
}

/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  /* Declaring a void function const makes no sense and is diagnosed
     by -Wattributes because calling it would have no effect.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    return;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
                         known_finite, warned_about, "const");
}

/* Emit suggestion about __attribute__((malloc)) for DECL.  */

static void
warn_function_malloc (tree decl)
{
  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_malloc, decl,
                         true, warned_about, "malloc");
}

/* Emit suggestion about __attribute__((noreturn)) for DECL.  */

static void
warn_function_noreturn (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, original_decl,
                           true, warned_about, "noreturn");
}

void
warn_function_cold (tree decl)
{
  tree original_decl = decl;

  static hash_set<tree> *warned_about;
  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_cold, original_decl,
                         true, warned_about, "cold");
}

/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
            tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t))
        return; /* Read of a constant, do not change the function state.  */
      else
        {
          if (dump_file)
            fprintf (dump_file, " global memory read is not const\n");
          /* Just a regular read.  */
          if (local->pure_const_state == IPA_CONST)
            local->pure_const_state = IPA_PURE;
        }
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
         variables.  */
      if (TREE_READONLY (t))
        return;

      if (dump_file)
        fprintf (dump_file, " static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}

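/* For illustration of the transitions above (hypothetical variables): a
   read of "static const int k;" leaves the state untouched, a read of a
   non-readonly global such as "int g;" lowers IPA_CONST to IPA_PURE, and
   any write to g or any touch of a volatile variable forces IPA_NEITHER.  */
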
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (refs_local_or_readonly_memory_p (t))
    {
      if (dump_file)
        fprintf (dump_file, " Indirect ref to local or readonly "
                 "memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
        fprintf (dump_file, " Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}

/* Compute state based on ECF FLAGS and store to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
                  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " looping\n");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = true;
    }
}

/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store the better
   of the two variants into STATE and LOOPING.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
              enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
        *looping = looping2;
      else
        *looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}

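/* For example, merging a body state of IPA_PURE/looping with a previously
   known IPA_CONST/non-looping via better_state yields IPA_CONST/non-looping;
   worse_state below would instead settle on IPA_PURE/looping.  */
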
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store the worse
   of the two variants into STATE and LOOPING.
   FROM and TO identify the caller and the callee symbol of the call
   being accounted for.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
             enum pure_const_state_e state2, bool looping2,
             struct symtab_node *from,
             struct symtab_node *to)
{
  /* Consider function:

     bool a(int *p)
     {
       return *p==*p;
     }

     During early optimization we will turn this into:

     bool a(int *p)
     {
       return true;
     }

     This function will now be detected as CONST; however, when interposed
     it may end up being just pure.  We must always assume the worst
     scenario here.  */
  if (*state == IPA_CONST && state2 == IPA_CONST
      && to && !TREE_READONLY (to->decl) && !to->binds_to_current_def_p (from))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Dropping state to PURE because call to %s may not "
                 "bind to current def.\n", to->dump_name ());
      state2 = IPA_PURE;
    }
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}

/* Recognize special cases of builtins that are by themselves not pure or
   const but a function using them is.  */
static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
                       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
      case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
      case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
        *looping = false;
        *state = IPA_CONST;
        return true;
      case BUILT_IN_PREFETCH:
        *looping = true;
        *state = IPA_CONST;
        return true;
      default:
        break;
      }
  return false;
}

/* Check the parameters of function call CALL to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or a call with
   special attributes that may also affect the purity.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (cfun, call);
  bool possibly_throws_externally = (possibly_throws
                                     && stmt_can_throw_external (cfun, call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
            && tree_could_throw_p (gimple_op (call, i)))
          {
            if (possibly_throws && cfun->can_throw_non_call_exceptions)
              {
                if (dump_file)
                  fprintf (dump_file, " operand can throw; looping\n");
                local->looping = true;
              }
            if (possibly_throws_externally)
              {
                if (dump_file)
                  fprintf (dump_file, " operand can throw externally\n");
                local->can_throw = true;
              }
          }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
          && !nonfreeing_call_p (call))
        local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
        {
          worse_state (&local->pure_const_state, &local->looping,
                       call_state, call_looping,
                       NULL, NULL);
          return;
        }
      /* When bad things happen to bad functions, they cannot be const
         or pure.  */
      if (setjmp_call_p (callee_t))
        {
          if (dump_file)
            fprintf (dump_file, " setjmp is not const/pure\n");
          local->looping = true;
          local->pure_const_state = IPA_NEITHER;
        }

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee_t))
          {
          case BUILT_IN_LONGJMP:
          case BUILT_IN_NONLOCAL_GOTO:
            if (dump_file)
              fprintf (dump_file,
                       " longjmp and nonlocal goto is not const/pure\n");
            local->pure_const_state = IPA_NEITHER;
            local->looping = true;
            break;
          default:
            break;
          }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
        fprintf (dump_file, " Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  Handle internal calls always, those calls don't have
     corresponding cgraph edges and thus aren't processed during
     the propagation.  */
  else if (!ipa || gimple_call_internal_p (call))
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, " can throw; looping\n");
          local->looping = true;
        }
      if (possibly_throws_externally)
        {
          if (dump_file)
            {
              fprintf (dump_file, " can throw externally to lp %i\n",
                       lookup_stmt_eh_lp (call));
              if (callee_t)
                fprintf (dump_file, " callee:%s\n",
                         IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
            }
          local->can_throw = true;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
                        ((flags & (ECF_NORETURN | ECF_NOTHROW))
                         == (ECF_NORETURN | ECF_NOTHROW))
                        || (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
                   call_state, call_looping, NULL, NULL);
    }
  /* Direct function calls are handled by IPA propagation.  */
}

/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple *, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Look at the statement pointed to by GSIP and figure out what interesting
   side effects it has.  */
static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple *stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  /* Do consider clobbers as side effects before IPA, so that we would
     rather inline C++ destructors and keep clobber semantics than
     eliminate them.

     Similar logic is in ipa-modref.

     TODO: We may get smarter during early optimizations on these and let
     functions containing only clobbers be optimized more.  This is a common
     case of C++ destructors.  */

  if ((ipa || cfun->after_inlining) && gimple_clobber_p (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, " Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
                            ipa ? check_ipa_load : check_load,
                            ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (cfun, stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, " can throw; looping\n");
          local->looping = true;
        }
      if (stmt_can_throw_external (cfun, stmt))
        {
          if (dump_file)
            fprintf (dump_file, " can throw externally\n");
          local->can_throw = true;
        }
      else
        if (dump_file)
          fprintf (dump_file, " can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
        /* Target of long jump.  */
        {
          if (dump_file)
            fprintf (dump_file, " nonlocal label is not const/pure\n");
          local->pure_const_state = IPA_NEITHER;
        }
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
        {
          if (dump_file)
            fprintf (dump_file, " memory asm clobber is not const/pure\n");
          /* Abandon all hope, ye who enter here. */
          local->pure_const_state = IPA_NEITHER;
          local->can_free = true;
        }
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
        {
          if (dump_file)
            fprintf (dump_file, " volatile is not const/pure\n");
          /* Abandon all hope, ye who enter here. */
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;
          local->can_free = true;
        }
      return;
    default:
      break;
    }
}

/* Check that RETVAL is used only in STMT and in comparisons against 0.
   RETVAL is return value of the function and STMT is return stmt.  */

static bool
check_retval_uses (tree retval, gimple *stmt)
{
  imm_use_iterator use_iter;
  gimple *use_stmt;

  FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, retval)
    if (gcond *cond = dyn_cast<gcond *> (use_stmt))
      {
        tree op2 = gimple_cond_rhs (cond);
        if (!integer_zerop (op2))
          return false;
      }
    else if (gassign *ga = dyn_cast<gassign *> (use_stmt))
      {
        enum tree_code code = gimple_assign_rhs_code (ga);
        if (TREE_CODE_CLASS (code) != tcc_comparison)
          return false;
        if (!integer_zerop (gimple_assign_rhs2 (ga)))
          return false;
      }
    else if (is_gimple_debug (use_stmt))
      ;
    else if (use_stmt != stmt)
      return false;

  return true;
}

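/* For illustration, in a return block like

     p_1 = foo ();
     if (p_1 == 0B)
       goto <bb fail>;
     return p_1;

   check_retval_uses (p_1, <return stmt>) holds: every immediate use of p_1
   is either the return statement itself or a comparison against 0.  */
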
/* malloc_candidate_p() checks if FUN can possibly be annotated with the
   malloc attribute.  Currently this function does a very conservative
   analysis.  FUN is considered to be a candidate if
   1) It returns a value of pointer type.
   2) SSA_NAME_DEF_STMT (return_value) is either a function call or
      a phi, and each element of the phi is either NULL or
      SSA_NAME_DEF_STMT(element) is a function call.
   3) The return value has immediate uses only within comparisons (gcond or
      gassign) and the return stmt (and likewise a phi arg has immediate use
      only within a comparison or the phi stmt).  */

#define DUMP_AND_RETURN(reason)  \
{ \
  if (dump_file && (dump_flags & TDF_DETAILS))  \
    fprintf (dump_file, "\n%s is not a malloc candidate, reason: %s\n", \
             (node->dump_name ()), (reason)); \
  return false; \
}

static bool
malloc_candidate_p_1 (function *fun, tree retval, gimple *ret_stmt, bool ipa,
                      bitmap visited)
{
  cgraph_node *node = cgraph_node::get_create (fun->decl);
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (retval)))
    return true;

  if (!check_retval_uses (retval, ret_stmt))
    DUMP_AND_RETURN("Return value has uses outside return stmt"
                    " and comparisons against 0.")

  gimple *def = SSA_NAME_DEF_STMT (retval);

  if (gcall *call_stmt = dyn_cast<gcall *> (def))
    {
      tree callee_decl = gimple_call_fndecl (call_stmt);
      if (!callee_decl)
        return false;

      if (!ipa && !DECL_IS_MALLOC (callee_decl))
        DUMP_AND_RETURN("callee_decl does not have malloc attribute for"
                        " non-ipa mode.")

      cgraph_edge *cs = node->get_edge (call_stmt);
      if (cs)
        {
          ipa_call_summary *es = ipa_call_summaries->get_create (cs);
          es->is_return_callee_uncaptured = true;
        }
    }

  else if (gphi *phi = dyn_cast<gphi *> (def))
    {
      bool all_args_zero = true;
      for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
        {
          tree arg = gimple_phi_arg_def (phi, i);
          if (integer_zerop (arg))
            continue;

          all_args_zero = false;
          if (TREE_CODE (arg) != SSA_NAME)
            DUMP_AND_RETURN ("phi arg is not SSA_NAME.");
          if (!check_retval_uses (arg, phi))
            DUMP_AND_RETURN ("phi arg has uses outside phi"
                             " and comparisons against 0.")

          gimple *arg_def = SSA_NAME_DEF_STMT (arg);
          if (is_a<gphi *> (arg_def))
            {
              if (!malloc_candidate_p_1 (fun, arg, phi, ipa, visited))
                DUMP_AND_RETURN ("nested phi fail")
              continue;
            }

          gcall *call_stmt = dyn_cast<gcall *> (arg_def);
          if (!call_stmt)
            DUMP_AND_RETURN ("phi arg is not a call_stmt.")

          tree callee_decl = gimple_call_fndecl (call_stmt);
          if (!callee_decl)
            return false;
          if (!ipa && !DECL_IS_MALLOC (callee_decl))
            DUMP_AND_RETURN("callee_decl does not have malloc attribute"
                            " for non-ipa mode.")

          cgraph_edge *cs = node->get_edge (call_stmt);
          if (cs)
            {
              ipa_call_summary *es = ipa_call_summaries->get_create (cs);
              es->is_return_callee_uncaptured = true;
            }
        }

      if (all_args_zero)
        DUMP_AND_RETURN ("Return value is a phi with all args equal to 0.")
    }

  else
    DUMP_AND_RETURN("def_stmt of return value is not a call or phi-stmt.")

  return true;
}

static bool
malloc_candidate_p (function *fun, bool ipa)
{
  basic_block exit_block = EXIT_BLOCK_PTR_FOR_FN (fun);
  edge e;
  edge_iterator ei;
  cgraph_node *node = cgraph_node::get_create (fun->decl);

  if (EDGE_COUNT (exit_block->preds) == 0
      || !flag_delete_null_pointer_checks)
    return false;

  auto_bitmap visited;
  FOR_EACH_EDGE (e, ei, exit_block->preds)
    {
      gimple_stmt_iterator gsi = gsi_last_bb (e->src);
      greturn *ret_stmt = dyn_cast<greturn *> (gsi_stmt (gsi));

      if (!ret_stmt)
        return false;

      tree retval = gimple_return_retval (ret_stmt);
      if (!retval)
        DUMP_AND_RETURN("No return value.")

      if (TREE_CODE (retval) != SSA_NAME
          || TREE_CODE (TREE_TYPE (retval)) != POINTER_TYPE)
        DUMP_AND_RETURN("Return value is not SSA_NAME or not a pointer type.")

      if (!malloc_candidate_p_1 (fun, retval, ret_stmt, ipa, visited))
        return false;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nFound %s to be candidate for malloc attribute\n",
             IDENTIFIER_POINTER (DECL_NAME (fun->decl)));
  return true;
}

#undef DUMP_AND_RETURN

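/* A sketch of a function accepted by the criteria above (xstrdup is just
   an example callee; in non-IPA mode the callee must itself carry the
   malloc attribute):

     char *dup_or_null (const char *s)
     {
       if (!s)
         return NULL;
       return xstrdup (s);
     }

   After CFG cleanup the two returns are typically merged into a single
   return of a phi of NULL and the call result, and each arm is used only
   in the return and in comparisons against 0.  */
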
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (class funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
                    flags_from_decl_or_type (fn->decl),
                    fn->cannot_return_p ());

  if (fn->thunk || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk && thunk_info::get (fn)->virtual_offset_p)
        l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    {
      fprintf (dump_file, "\n\n local analysis of %s\n ",
               fn->dump_name ());
    }

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          check_stmt (&gsi, l, ipa);
          if (l->pure_const_state == IPA_NEITHER
              && l->looping
              && l->can_throw
              && l->can_free)
            goto end;
        }
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
         indication of a possible infinite loop side
         effect).  */
      if (mark_dfs_back_edges ())
        {
          /* Preheaders are needed for SCEV to work.
             Simple latches and recorded exits improve chances that the loop
             will be proved to be finite in testcases such as in loop-15.c
             and loop-24.c.  */
          loop_optimizer_init (LOOPS_HAVE_PREHEADERS
                               | LOOPS_HAVE_SIMPLE_LATCHES
                               | LOOPS_HAVE_RECORDED_EXITS);
          if (dump_file && (dump_flags & TDF_DETAILS))
            flow_loops_dump (dump_file, NULL, 0);
          if (mark_irreducible_loops ())
            {
              if (dump_file)
                fprintf (dump_file, " has irreducible loops\n");
              l->looping = true;
            }
          else
            {
              scev_initialize ();
              for (auto loop : loops_list (cfun, 0))
                if (!finite_loop_p (loop))
                  {
                    if (dump_file)
                      fprintf (dump_file, " cannot prove finiteness of "
                               "loop %i\n", loop->num);
                    l->looping = true;
                    break;
                  }
              scev_finalize ();
            }
          loop_optimizer_finalize ();
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, " checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
                l->state_previously_known,
                l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  l->malloc_state = STATE_MALLOC_BOTTOM;
  if (DECL_IS_MALLOC (decl))
    l->malloc_state = STATE_MALLOC;
  else if (ipa && malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), true))
    l->malloc_state = STATE_MALLOC_TOP;
  else if (malloc_candidate_p (DECL_STRUCT_FUNCTION (decl), false))
    l->malloc_state = STATE_MALLOC;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
        fprintf (dump_file, "Function can locally free.\n");
      if (l->malloc_state == STATE_MALLOC)
        fprintf (dump_file, "Function is locally malloc.\n");
    }
  return l;
}

void
funct_state_summary_t::insert (cgraph_node *node, funct_state_d *state)
{
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  if (opt_for_fn (node->decl, flag_ipa_pure_const))
    {
      funct_state_d *a = analyze_function (node, true);
      new (state) funct_state_d (*a);
      free (a);
    }
  else
    /* Do not keep stale summaries.  */
    funct_state_summaries->remove (node);
}

/* Called when new clone is inserted to callgraph late.  */

void
funct_state_summary_t::duplicate (cgraph_node *, cgraph_node *dst,
                                  funct_state_d *src_data,
                                  funct_state_d *dst_data)
{
  new (dst_data) funct_state_d (*src_data);
  if (dst_data->malloc_state == STATE_MALLOC
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (dst->decl))))
    dst_data->malloc_state = STATE_MALLOC_BOTTOM;
}

void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  funct_state_summaries = new funct_state_summary_t (symtab);
}

/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (opt_for_fn (node->decl, flag_ipa_pure_const))
      {
        funct_state_d *a = analyze_function (node, true);
        new (funct_state_summaries->get_create (node)) funct_state_d (*a);
        free (a);
      }
}

/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && funct_state_summaries->exists (node))
        count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      funct_state_d *fs = funct_state_summaries->get (node);
      if (node->definition && fs != NULL)
        {
          struct bitpack_d bp;
          int node_ref;
          lto_symtab_encoder_t encoder;

          encoder = ob->decl_state->symtab_node_encoder;
          node_ref = lto_symtab_encoder_encode (encoder, node);
          streamer_write_uhwi_stream (ob->main_stream, node_ref);

          /* Note that flags will need to be read in the opposite
             order as we are pushing the bitflags into FLAGS.  */
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, fs->pure_const_state, 2);
          bp_pack_value (&bp, fs->state_previously_known, 2);
          bp_pack_value (&bp, fs->looping_previously_known, 1);
          bp_pack_value (&bp, fs->looping, 1);
          bp_pack_value (&bp, fs->can_throw, 1);
          bp_pack_value (&bp, fs->can_free, 1);
          bp_pack_value (&bp, fs->malloc_state, 2);
          streamer_write_bitpack (&bp);
        }
    }

  lto_destroy_simple_output_block (ob);
}

/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
        = lto_create_simple_input_block (file_data,
                                         LTO_section_ipa_pure_const,
                                         &data, &len);
      if (ib)
        {
          unsigned int i;
          unsigned int count = streamer_read_uhwi (ib);

          for (i = 0; i < count; i++)
            {
              unsigned int index;
              struct cgraph_node *node;
              struct bitpack_d bp;
              funct_state fs;
              lto_symtab_encoder_t encoder;

              index = streamer_read_uhwi (ib);
              encoder = file_data->symtab_node_encoder;
              node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
                                                                        index));

              fs = funct_state_summaries->get_create (node);
              /* Note that the flags must be read in the opposite
                 order in which they were written (the bitflags were
                 pushed into FLAGS).  */
              bp = streamer_read_bitpack (ib);
              fs->pure_const_state
                = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->state_previously_known
                = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->looping_previously_known = bp_unpack_value (&bp, 1);
              fs->looping = bp_unpack_value (&bp, 1);
              fs->can_throw = bp_unpack_value (&bp, 1);
              fs->can_free = bp_unpack_value (&bp, 1);
              fs->malloc_state
                = (enum malloc_state_e) bp_unpack_value (&bp, 2);

              if (dump_file)
                {
                  int flags = flags_from_decl_or_type (node->decl);
                  fprintf (dump_file, "Read info for %s ", node->dump_name ());
                  if (flags & ECF_CONST)
                    fprintf (dump_file, " const");
                  if (flags & ECF_PURE)
                    fprintf (dump_file, " pure");
                  if (flags & ECF_NOTHROW)
                    fprintf (dump_file, " nothrow");
                  fprintf (dump_file, "\n pure const state: %s\n",
                           pure_const_names[fs->pure_const_state]);
                  fprintf (dump_file, " previously known state: %s\n",
                           pure_const_names[fs->state_previously_known]);
                  if (fs->looping)
                    fprintf (dump_file, " function is locally looping\n");
                  if (fs->looping_previously_known)
                    fprintf (dump_file, " function is previously known looping\n");
                  if (fs->can_throw)
                    fprintf (dump_file, " function is locally throwing\n");
                  if (fs->can_free)
                    fprintf (dump_file, " function can locally free\n");
                  fprintf (dump_file, "\n malloc state: %s\n",
                           malloc_state_names[fs->malloc_state]);
                }
            }

          lto_destroy_simple_input_block (file_data,
                                          LTO_section_ipa_pure_const,
                                          ib, data, len);
        }
    }
}

/* We only propagate across edges that can throw externally and whose callee
   is not interposable.  */

static bool
ignore_edge_for_nothrow (struct cgraph_edge *e)
{
  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
    return true;

  enum availability avail;
  cgraph_node *ultimate_target
    = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
  if (avail <= AVAIL_INTERPOSABLE || TREE_NOTHROW (ultimate_target->decl))
    return true;
  return ((opt_for_fn (e->callee->decl, flag_non_call_exceptions)
           && !e->callee->binds_to_current_def_p (e->caller))
          || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
          || !opt_for_fn (ultimate_target->decl, flag_ipa_pure_const));
}

/* Return true if NODE is a self recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}

/* Return true if N is a cdtor that is not const or pure.  In this case we may
   need to remove the unreachable function if it is marked const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return ((!TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl))
            || DECL_LOOPING_CONST_OR_PURE_P (n->decl));
  return false;
}

/* Skip edges from and to nodes without ipa_pure_const enabled.
   Ignore unavailable symbols.  */

static bool
ignore_edge_for_pure_const (struct cgraph_edge *e)
{
  enum availability avail;
  cgraph_node *ultimate_target
    = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);

  return (avail <= AVAIL_INTERPOSABLE
          || !opt_for_fn (e->caller->decl, flag_ipa_pure_const)
          || !opt_for_fn (ultimate_target->decl,
                          flag_ipa_pure_const));
}

/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;
  bool has_cdtor;

  order_pos = ipa_reduced_postorder (order, true,
                                     ignore_edge_for_pure_const);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
        continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
        {
          struct cgraph_edge *e;
          struct cgraph_edge *ie;
          int i;
          struct ipa_ref *ref = NULL;

          funct_state w_l = funct_state_summaries->get_create (w);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, " Visiting %s state:%s looping %i\n",
                     w->dump_name (),
                     pure_const_names[w_l->pure_const_state],
                     w_l->looping);

          /* First merge in function body properties.
             We are safe to pass NULL as FROM and TO because we will take care
             of possible interposition when walking callees.  */
          worse_state (&pure_const_state, &looping,
                       w_l->pure_const_state, w_l->looping,
                       NULL, NULL);
          if (pure_const_state == IPA_NEITHER)
            break;

          count++;

          /* We consider recursive cycles as possibly infinite.
             This might be relaxed since infinite recursion leads to stack
             overflow.  */
          if (count > 1)
            looping = true;

          /* Now walk the edges and merge in callee properties.  */
          for (e = w->callees; e && pure_const_state != IPA_NEITHER;
               e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                function_or_virtual_thunk_symbol (&avail,
                                                  e->caller);
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, " Call to %s",
                           e->callee->dump_name ());
                }
              if (avail > AVAIL_INTERPOSABLE)
                {
                  funct_state y_l = funct_state_summaries->get_create (y);

                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file,
                               " state:%s looping:%i\n",
                               pure_const_names[y_l->pure_const_state],
                               y_l->looping);
                    }
                  if (y_l->pure_const_state > IPA_PURE
                      && e->cannot_lead_to_return_p ())
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file,
                                 " Ignoring side effects"
                                 " -> pure, looping\n");
                      edge_state = IPA_PURE;
                      edge_looping = true;
                    }
                  else
                    {
                      edge_state = y_l->pure_const_state;
                      edge_looping = y_l->looping;
                    }
                }
              else if (special_builtin_state (&edge_state, &edge_looping,
                                              y->decl))
                ;
              else
                state_from_flags (&edge_state, &edge_looping,
                                  flags_from_decl_or_type (y->decl),
                                  e->cannot_lead_to_return_p ());

              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping, e->caller, e->callee);
              if (pure_const_state == IPA_NEITHER)
                break;
            }

          /* Now process the indirect calls.  */
          for (ie = w->indirect_calls;
               ie && pure_const_state != IPA_NEITHER; ie = ie->next_callee)
            {
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, " Indirect call");
              state_from_flags (&edge_state, &edge_looping,
                                ie->indirect_info->ecf_flags,
                                ie->cannot_lead_to_return_p ());
              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping, NULL, NULL);
              if (pure_const_state == IPA_NEITHER)
                break;
            }

          /* And finally all loads and stores.  */
          for (i = 0; w->iterate_reference (i, ref)
               && pure_const_state != IPA_NEITHER; i++)
            {
              enum pure_const_state_e ref_state = IPA_CONST;
              bool ref_looping = false;
              switch (ref->use)
                {
                case IPA_REF_LOAD:
                  /* readonly reads are safe.  */
                  if (TREE_READONLY (ref->referred->decl))
                    break;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, " nonreadonly global var read\n");
                  ref_state = IPA_PURE;
                  break;
                case IPA_REF_STORE:
                  if (ref->cannot_lead_to_return ())
                    break;
                  ref_state = IPA_NEITHER;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, " global var write\n");
                  break;
                case IPA_REF_ADDR:
                  break;
                default:
                  gcc_unreachable ();
                }
              better_state (&ref_state, &ref_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           ref_state, ref_looping, NULL, NULL);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Result %s looping %i\n",
                 pure_const_names [pure_const_state],
                 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
        {
          struct cgraph_edge *e;
          funct_state w_l = funct_state_summaries->get (w);

          if (w_l->can_free
              || w->get_availability () == AVAIL_INTERPOSABLE
              || w->indirect_calls)
            can_free = true;

          for (e = w->callees; e && !can_free; e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = e->callee->
                function_or_virtual_thunk_symbol (&avail,
                                                  e->caller);

              if (avail > AVAIL_INTERPOSABLE)
                can_free = funct_state_summaries->get (y)->can_free;
              else
                can_free = true;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = funct_state_summaries->get (w);
          enum pure_const_state_e this_state = pure_const_state;
          bool this_looping = looping;

          w_l->can_free = can_free;
          w->nonfreeing_fn = !can_free;
          if (!can_free && dump_file)
            fprintf (dump_file, "Function found not to call free: %s\n",
                     w->dump_name ());

          if (w_l->state_previously_known != IPA_NEITHER
              && this_state > w_l->state_previously_known)
            {
              this_state = w_l->state_previously_known;
              if (this_state == IPA_NEITHER)
                this_looping = w_l->looping_previously_known;
            }
          if (!this_looping && self_recursive_p (w))
            this_looping = true;
          if (!w_l->looping_previously_known)
            this_looping = false;

          /* All nodes within a cycle share the same info.  */
          w_l->pure_const_state = this_state;
          w_l->looping = this_looping;

          /* Inline clones share declaration with their offline copies;
             do not modify their declarations since the offline copy may
             be different.  */
          if (!w->inlined_to)
            switch (this_state)
              {
              case IPA_CONST:
                if (!TREE_READONLY (w->decl))
                  {
                    warn_function_const (w->decl, !this_looping);
                    if (dump_file)
                      fprintf (dump_file, "Function found to be %sconst: %s\n",
                               this_looping ? "looping " : "",
                               w->dump_name ());
                  }
                /* Turning constructor or destructor to non-looping const/pure
                   enables us to possibly remove the function completely.  */
                if (this_looping)
                  has_cdtor = false;
                else
                  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
                                                              NULL, true);
                if (w->set_const_flag (true, this_looping))
                  {
                    if (dump_file)
                      fprintf (dump_file,
                               "Declaration updated to be %sconst: %s\n",
                               this_looping ? "looping " : "",
                               w->dump_name ());
                    remove_p |= has_cdtor;
                  }
                break;

              case IPA_PURE:
                if (!DECL_PURE_P (w->decl))
                  {
                    warn_function_pure (w->decl, !this_looping);
                    if (dump_file)
                      fprintf (dump_file, "Function found to be %spure: %s\n",
                               this_looping ? "looping " : "",
                               w->dump_name ());
                  }
                if (this_looping)
                  has_cdtor = false;
                else
                  has_cdtor = w->call_for_symbol_and_aliases (cdtor_p,
                                                              NULL, true);
                if (w->set_pure_flag (true, this_looping))
                  {
                    if (dump_file)
                      fprintf (dump_file,
                               "Declaration updated to be %spure: %s\n",
                               this_looping ? "looping " : "",
                               w->dump_name ());
                    remove_p |= has_cdtor;
                  }
                break;

              default:
                break;
              }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}

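/* For illustration (hypothetical functions f and g): if f's body is locally
   const but f calls g whose summary is pure, the walk above merges g's state
   into f's cycle via worse_state, so f settles at pure; a single global
   store anywhere in the cycle would drag the whole cycle to neither.  */
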
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true,
                                     ignore_edge_for_nothrow);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
        continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
        {
          struct cgraph_edge *e, *ie;

          if (!TREE_NOTHROW (w->decl))
            {
              funct_state w_l = funct_state_summaries->get_create (w);

              if (w_l->can_throw
                  || w->get_availability () == AVAIL_INTERPOSABLE)
                can_throw = true;

              for (e = w->callees; e && !can_throw; e = e->next_callee)
                {
                  enum availability avail;

                  if (!e->can_throw_external || TREE_NOTHROW (e->callee->decl))
                    continue;

                  struct cgraph_node *y = e->callee->
                    function_or_virtual_thunk_symbol (&avail,
                                                      e->caller);

                  /* We can use info about the callee only if we know it
                     cannot be interposed.
                     When callee is compiled with non-call exceptions we also
                     must check that the declaration is bound to current
                     body as other semantically equivalent body may still
                     throw.  */
                  if (avail <= AVAIL_INTERPOSABLE
                      || (!TREE_NOTHROW (y->decl)
                          && (funct_state_summaries->get_create (y)->can_throw
                              || (opt_for_fn (y->decl, flag_non_call_exceptions)
                                  && !e->callee->binds_to_current_def_p (w)))))
                    can_throw = true;
                }
              for (ie = w->indirect_calls; ie && !can_throw;
                   ie = ie->next_callee)
                if (ie->can_throw_external
                    && !(ie->indirect_info->ecf_flags & ECF_NOTHROW))
                  can_throw = true;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's nothrow state, which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = funct_state_summaries->get_create (w);
          if (!can_throw && !TREE_NOTHROW (w->decl))
            {
              /* Inline clones share declaration with their offline copies;
                 do not modify their declarations since the offline copy may
                 be different.  */
              if (!w->inlined_to)
                {
                  w->set_nothrow_flag (true);
                  if (dump_file)
                    fprintf (dump_file, "Function found to be nothrow: %s\n",
                             w->dump_name ());
                }
            }
          else if (can_throw && !TREE_NOTHROW (w->decl))
            w_l->can_throw = true;
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
}

/* Debugging function to dump state of malloc lattice.  */

DEBUG_FUNCTION
static void
dump_malloc_lattice (FILE *dump_file, const char *s)
{
  if (!dump_file)
    return;

  fprintf (dump_file, "\n\nMALLOC LATTICE %s:\n", s);
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      funct_state fs = funct_state_summaries->get (node);
      if (fs)
        fprintf (dump_file, "%s: %s\n", node->dump_name (),
                 malloc_state_names[fs->malloc_state]);
    }
}

/* Propagate malloc attribute across the callgraph.  */

static void
propagate_malloc (void)
{
  cgraph_node *node;
  FOR_EACH_FUNCTION (node)
    {
      if (DECL_IS_MALLOC (node->decl))
        if (!funct_state_summaries->exists (node))
          {
            funct_state fs = funct_state_summaries->get_create (node);
            fs->malloc_state = STATE_MALLOC;
          }
    }

  dump_malloc_lattice (dump_file, "Initial");
  struct cgraph_node **order
    = XNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos = ipa_reverse_postorder (order);
  bool changed = true;

  while (changed)
    {
      changed = false;
      /* Walk in postorder.  */
      for (int i = order_pos - 1; i >= 0; --i)
        {
          cgraph_node *node = order[i];
          if (node->alias
              || !node->definition
              || !funct_state_summaries->exists (node))
            continue;

          funct_state l = funct_state_summaries->get (node);

          /* FIXME: add support for indirect-calls.  */
          if (node->indirect_calls)
            {
              l->malloc_state = STATE_MALLOC_BOTTOM;
              continue;
            }

          if (node->get_availability () <= AVAIL_INTERPOSABLE)
            {
              l->malloc_state = STATE_MALLOC_BOTTOM;
              continue;
            }

          if (l->malloc_state == STATE_MALLOC_BOTTOM)
            continue;

          auto_vec<cgraph_node *, 16> callees;
          for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
            {
              ipa_call_summary *es = ipa_call_summaries->get_create (cs);
              if (es && es->is_return_callee_uncaptured)
                callees.safe_push (cs->callee);
            }

          malloc_state_e new_state = l->malloc_state;
          for (unsigned j = 0; j < callees.length (); j++)
            {
              cgraph_node *callee = callees[j];
              if (!funct_state_summaries->exists (node))
                {
                  new_state = STATE_MALLOC_BOTTOM;
                  break;
                }
              malloc_state_e callee_state
                = funct_state_summaries->get_create (callee)->malloc_state;
              if (new_state < callee_state)
                new_state = callee_state;
            }
          if (new_state != l->malloc_state)
            {
              changed = true;
              l->malloc_state = new_state;
            }
        }
    }

  FOR_EACH_DEFINED_FUNCTION (node)
    if (funct_state_summaries->exists (node))
      {
        funct_state l = funct_state_summaries->get (node);
        if (!node->alias
            && l->malloc_state == STATE_MALLOC
            && !node->inlined_to
            && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (node->decl))))
          {
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Function %s found to be malloc\n",
                       node->dump_name ());

            bool malloc_decl_p = DECL_IS_MALLOC (node->decl);
            node->set_malloc_flag (true);
            if (!malloc_decl_p && warn_suggest_attribute_malloc)
              warn_function_malloc (node->decl);
          }
      }

  dump_malloc_lattice (dump_file, "after propagation");
  ipa_free_postorder_info ();
  free (order);
}

1994 /* Produce the global information by performing a transitive closure
1995 on the local information that was produced by generate_summary. */
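/* As an illustration (not from this file) of that closure:

     int g (int x) { return x + 1; }            // const
     int h (int *p) { return *p; }              // pure: reads memory
     int f (int *p) { return g (*p) + h (p); }  // meets its callees

   f inherits the weakest property reachable through its calls, so it
   can be promoted to pure but not const.  */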
1997 unsigned int
1998 pass_ipa_pure_const::
1999 execute (function *)
2001 bool remove_p;
2003 /* Nothrow discovery makes it known that more functions cannot return
2004 by throwing an exception, which improves later analysis. */
2005 propagate_nothrow ();
2006 propagate_malloc ();
2007 remove_p = propagate_pure_const ();
2009 delete funct_state_summaries;
2010 return remove_p ? TODO_remove_functions : 0;
2013 static bool
2014 gate_pure_const (void)
2016 return flag_ipa_pure_const || in_lto_p;
2019 pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
2020 : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
2021 pure_const_generate_summary, /* generate_summary */
2022 pure_const_write_summary, /* write_summary */
2023 pure_const_read_summary, /* read_summary */
2024 NULL, /* write_optimization_summary */
2025 NULL, /* read_optimization_summary */
2026 NULL, /* stmt_fixup */
2027 0, /* function_transform_todo_flags_start */
2028 NULL, /* function_transform */
2029 NULL), /* variable_transform */
2030 init_p (false) {}
2032 ipa_opt_pass_d *
2033 make_pass_ipa_pure_const (gcc::context *ctxt)
2035 return new pass_ipa_pure_const (ctxt);
2038 /* Return true if function should be skipped for local pure const analysis. */
2040 static bool
2041 skip_function_for_local_pure_const (struct cgraph_node *node)
2043 /* Because we do not schedule pass_fixup_cfg over the whole program after
2044 early optimizations, we must not promote functions that are called by
2045 already processed functions. */
2047 if (function_called_by_processed_nodes_p ())
2049 if (dump_file)
2050 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
2051 return true;
2053 /* Save some work and do not analyze functions which are interposable and
2054 do not have any non-interposable aliases. */
2055 if (node->get_availability () <= AVAIL_INTERPOSABLE
2056 && !node->has_aliases_p ())
2058 if (dump_file)
2059 fprintf (dump_file,
2060 "Function is interposable; not analyzing.\n");
2061 return true;
2063 return false;
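/* E.g. (illustrative) an exported function compiled with -fpic and
   default visibility is AVAIL_INTERPOSABLE: the dynamic linker may
   bind calls to a different definition, so promoting this body would
   be unsafe unless some non-interposable alias refers to it.  */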
2066 /* Simple local pass for pure const discovery reusing the analysis from
2067 ipa_pure_const. This pass is effective when executed together with
2068 other optimization passes in the early optimization pass queue. */
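/* For instance (illustrative), when this pass runs in the early queue,

     static int sq (int x) { return x * x; }

   is already discovered const here, letting the remaining early passes
   CSE repeated calls to sq instead of waiting for the IPA pass.  */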
2070 namespace {
2072 const pass_data pass_data_local_pure_const =
2074 GIMPLE_PASS, /* type */
2075 "local-pure-const", /* name */
2076 OPTGROUP_NONE, /* optinfo_flags */
2077 TV_IPA_PURE_CONST, /* tv_id */
2078 0, /* properties_required */
2079 0, /* properties_provided */
2080 0, /* properties_destroyed */
2081 0, /* todo_flags_start */
2082 0, /* todo_flags_finish */
2085 class pass_local_pure_const : public gimple_opt_pass
2087 public:
2088 pass_local_pure_const (gcc::context *ctxt)
2089 : gimple_opt_pass (pass_data_local_pure_const, ctxt)
2092 /* opt_pass methods: */
2093 opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
2094 virtual bool gate (function *) { return gate_pure_const (); }
2095 virtual unsigned int execute (function *);
2097 }; // class pass_local_pure_const
2099 unsigned int
2100 pass_local_pure_const::execute (function *fun)
2102 bool changed = false;
2103 funct_state l;
2104 bool skip;
2105 struct cgraph_node *node;
2107 node = cgraph_node::get (current_function_decl);
2108 skip = skip_function_for_local_pure_const (node);
2110 if (!warn_suggest_attribute_const
2111 && !warn_suggest_attribute_pure
2112 && skip)
2113 return 0;
2115 l = analyze_function (node, false);
2117 /* Do NORETURN discovery. */
2118 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
2119 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2121 warn_function_noreturn (fun->decl);
2122 if (dump_file)
2123 fprintf (dump_file, "Function found to be noreturn: %s\n",
2124 current_function_name ());
2126 /* Update declaration and reduce profile to executed once. */
2127 TREE_THIS_VOLATILE (current_function_decl) = 1;
2128 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
2129 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2131 changed = true;
2134 switch (l->pure_const_state)
2136 case IPA_CONST:
2137 if (!TREE_READONLY (current_function_decl))
2139 warn_function_const (current_function_decl, !l->looping);
2140 if (dump_file)
2141 fprintf (dump_file, "Function found to be %sconst: %s\n",
2142 l->looping ? "looping " : "",
2143 current_function_name ());
2145 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
2146 && !l->looping)
2148 if (dump_file)
2149 fprintf (dump_file, "Function found to be non-looping: %s\n",
2150 current_function_name ());
2152 if (!skip && node->set_const_flag (true, l->looping))
2154 if (dump_file)
2155 fprintf (dump_file, "Declaration updated to be %sconst: %s\n",
2156 l->looping ? "looping " : "",
2157 current_function_name ());
2158 changed = true;
2160 break;
2162 case IPA_PURE:
2163 if (!DECL_PURE_P (current_function_decl))
2165 warn_function_pure (current_function_decl, !l->looping);
2166 if (dump_file)
2167 fprintf (dump_file, "Function found to be %spure: %s\n",
2168 l->looping ? "looping " : "",
2169 current_function_name ());
2171 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
2172 && !l->looping)
2174 if (dump_file)
2175 fprintf (dump_file, "Function found to be non-looping: %s\n",
2176 current_function_name ());
2178 if (!skip && node->set_pure_flag (true, l->looping))
2180 if (dump_file)
2181 fprintf (dump_file, "Declaration updated to be %spure: %s\n",
2182 l->looping ? "looping " : "",
2183 current_function_name ());
2184 changed = true;
2186 break;
2188 default:
2189 break;
2191 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
2193 node->set_nothrow_flag (true);
2194 changed = true;
2195 if (dump_file)
2196 fprintf (dump_file, "Function found to be nothrow: %s\n",
2197 current_function_name ());
2200 if (l->malloc_state == STATE_MALLOC
2201 && !DECL_IS_MALLOC (current_function_decl))
2203 node->set_malloc_flag (true);
2204 if (warn_suggest_attribute_malloc)
2205 warn_function_malloc (node->decl);
2206 changed = true;
2207 if (dump_file)
2208 fprintf (dump_file, "Function found to be malloc: %s\n",
2209 node->dump_name ());
2212 free (l);
2213 if (changed)
2214 return execute_fixup_cfg ();
2215 else
2216 return 0;
2219 } // anon namespace
2221 gimple_opt_pass *
2222 make_pass_local_pure_const (gcc::context *ctxt)
2224 return new pass_local_pure_const (ctxt);
2227 /* Emit noreturn warnings. */
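/* E.g. (illustrative) with -Wsuggest-attribute=noreturn,

     void fail (const char *msg) { fprintf (stderr, "%s\n", msg); exit (1); }

   has an exit block with no incoming edges, since exit does not return,
   so the pass below suggests adding a noreturn attribute to fail.  */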
2229 namespace {
2231 const pass_data pass_data_warn_function_noreturn =
2233 GIMPLE_PASS, /* type */
2234 "*warn_function_noreturn", /* name */
2235 OPTGROUP_NONE, /* optinfo_flags */
2236 TV_NONE, /* tv_id */
2237 PROP_cfg, /* properties_required */
2238 0, /* properties_provided */
2239 0, /* properties_destroyed */
2240 0, /* todo_flags_start */
2241 0, /* todo_flags_finish */
2244 class pass_warn_function_noreturn : public gimple_opt_pass
2246 public:
2247 pass_warn_function_noreturn (gcc::context *ctxt)
2248 : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
2251 /* opt_pass methods: */
2252 virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
2253 virtual unsigned int execute (function *fun)
2255 if (!TREE_THIS_VOLATILE (current_function_decl)
2256 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
2257 warn_function_noreturn (current_function_decl);
2258 return 0;
2261 }; // class pass_warn_function_noreturn
2263 } // anon namespace
2265 gimple_opt_pass *
2266 make_pass_warn_function_noreturn (gcc::context *ctxt)
2268 return new pass_warn_function_noreturn (ctxt);
2271 /* Simple local pass for nothrow discovery reusing the analysis from
2272 ipa_pure_const. It is effective when executed together with other
2273 optimization passes in the early optimization pass queue. */
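/* For example (illustrative), compiled with -fexceptions,

     int sum (const int *a, int n)
     { int s = 0; while (n-- > 0) s += *a++; return s; }

   contains no statement that can throw externally, so it is marked
   TREE_NOTHROW below and callers may drop EH cleanups around calls
   to it.  */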
2275 namespace {
2277 const pass_data pass_data_nothrow =
2279 GIMPLE_PASS, /* type */
2280 "nothrow", /* name */
2281 OPTGROUP_NONE, /* optinfo_flags */
2282 TV_IPA_PURE_CONST, /* tv_id */
2283 0, /* properties_required */
2284 0, /* properties_provided */
2285 0, /* properties_destroyed */
2286 0, /* todo_flags_start */
2287 0, /* todo_flags_finish */
2290 class pass_nothrow : public gimple_opt_pass
2292 public:
2293 pass_nothrow (gcc::context *ctxt)
2294 : gimple_opt_pass (pass_data_nothrow, ctxt)
2297 /* opt_pass methods: */
2298 opt_pass * clone () { return new pass_nothrow (m_ctxt); }
2299 virtual bool gate (function *) { return optimize; }
2300 virtual unsigned int execute (function *);
2302 }; // class pass_nothrow
2304 unsigned int
2305 pass_nothrow::execute (function *)
2307 struct cgraph_node *node;
2308 basic_block this_block;
2310 if (TREE_NOTHROW (current_function_decl))
2311 return 0;
2313 node = cgraph_node::get (current_function_decl);
2315 /* We run during lowering; we cannot really use availability yet. */
2316 if (cgraph_node::get (current_function_decl)->get_availability ()
2317 <= AVAIL_INTERPOSABLE)
2319 if (dump_file)
2320 fprintf (dump_file, "Function is interposable;"
2321 " not analyzing.\n");
2322 return 0;
2325 FOR_EACH_BB_FN (this_block, cfun)
2327 for (gimple_stmt_iterator gsi = gsi_start_bb (this_block);
2328 !gsi_end_p (gsi);
2329 gsi_next (&gsi))
2330 if (stmt_can_throw_external (cfun, gsi_stmt (gsi)))
2332 if (is_gimple_call (gsi_stmt (gsi)))
2334 tree callee_t = gimple_call_fndecl (gsi_stmt (gsi));
2335 if (callee_t && recursive_call_p (current_function_decl,
2336 callee_t))
2337 continue;
2340 if (dump_file)
2342 fprintf (dump_file, "Statement can throw: ");
2343 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
2345 return 0;
2349 node->set_nothrow_flag (true);
2351 bool cfg_changed = false;
2352 if (self_recursive_p (node))
2353 FOR_EACH_BB_FN (this_block, cfun)
2354 if (gimple *g = last_stmt (this_block))
2355 if (is_gimple_call (g))
2357 tree callee_t = gimple_call_fndecl (g);
2358 if (callee_t
2359 && recursive_call_p (current_function_decl, callee_t)
2360 && maybe_clean_eh_stmt (g)
2361 && gimple_purge_dead_eh_edges (this_block))
2362 cfg_changed = true;
2365 if (dump_file)
2366 fprintf (dump_file, "Function found to be nothrow: %s\n",
2367 current_function_name ());
2368 return cfg_changed ? TODO_cleanup_cfg : 0;
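/* Illustrative example: in a self-recursive function such as

     int fact (int n) { return n <= 1 ? 1 : n * fact (n - 1); }

   the scan above ignores the recursive call as a throw source; once
   fact is marked nothrow, the EH edge out of that call (present under
   -fexceptions) becomes dead, is purged, and TODO_cleanup_cfg is
   requested.  */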
2371 } // anon namespace
2373 gimple_opt_pass *
2374 make_pass_nothrow (gcc::context *ctxt)
2376 return new pass_nothrow (ctxt);