/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_OR_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise, the local sets will not contain information that is
   consistent with the post inlined state.  The global sets are not prone
   to this problem since they are by definition transitive.  */

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */
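
/* As a rough illustration (not from any particular testcase): a function
   whose result depends only on its arguments, e.g.

     int sq (int x) { return x * x; }

   is a candidate for const, while a function that reads global memory but
   has no side effects, e.g.

     extern int counter;
     int get_counter (void) { return counter; }

   can be at best pure, since its result may change whenever the global
   changes.  */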
#include "coretypes.h"
#include "print-tree.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-ssa-loop-niter.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "ipa-utils.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "langhooks.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "tree-scalar-evolution.h"
static struct pointer_set_t *visited_nodes;
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

const char *pure_const_names[3] = {"const", "pure", "neither"};
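
/* The enumerators above are ordered from best to worst, so a merge of two
   states can simply take the MAX of the two values (see worse_state
   below): merging IPA_CONST with IPA_PURE yields IPA_PURE, and merging
   anything with IPA_NEITHER yields IPA_NEITHER.  */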
/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly infinite loop.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could infinite loop since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;
};
/* State used when we know nothing about function.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true };
typedef struct funct_state_d * funct_state;
/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
/* Try to guess if function body will always be visible to compiler
   when compiling the call and whether compiler will be able
   to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
}
/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a pointer_set unique for
   OPTION, this function may initialize it and it is always returned
   by the function.  */

static struct pointer_set_t *
suggest_attribute (int option, tree decl, bool known_finite,
                   struct pointer_set_t *warned_about,
                   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = pointer_set_create ();
  if (pointer_set_contains (warned_about, decl))
    return warned_about;
  pointer_set_insert (warned_about, decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
              option,
              known_finite
              ? _("function might be candidate for attribute %<%s%>")
              : _("function might be candidate for attribute %<%s%>"
                  " if it is known to return normally"), attrib_name);
  return warned_about;
}
/* Emit suggestion about __attribute__ ((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  static struct pointer_set_t *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
                         known_finite, warned_about, "pure");
}
/* Emit suggestion about __attribute__ ((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  static struct pointer_set_t *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
                         known_finite, warned_about, "const");
}
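
/* As an illustration, with -Wsuggest-attribute=const the machinery above
   would typically flag an externally visible function such as

     int twice (int x) { return 2 * x; }

   with "function might be candidate for attribute 'const'".  Static or
   inline functions that are known to be finite are skipped, since the
   compiler can propagate that information by itself.  */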
static void
warn_function_noreturn (tree decl)
{
  static struct pointer_set_t *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
                           true, warned_about, "noreturn");
}
/* Return true if we have a function state for NODE.  */

static inline bool
has_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    return false;
  return funct_state_vec[node->uid] != NULL;
}
/* Return the function state from NODE.  */

static inline funct_state
get_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid
      || !funct_state_vec[node->uid])
    /* We might want to put correct previously_known state into varying.  */
    return &varying_state;
  return funct_state_vec[node->uid];
}
/* Set the function state S for NODE.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    funct_state_vec.safe_grow_cleared (node->uid + 1);
  funct_state_vec[node->uid] = s;
}
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.  */

static inline void
check_decl (funct_state local,
            tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile operand is not const/pure");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
        return; /* Read of a constant, do not change the function state.  */
      else
        {
          if (dump_file)
            fprintf (dump_file, "    global memory read is not const\n");
          /* Just a regular read.  */
          if (local->pure_const_state == IPA_CONST)
            local->pure_const_state = IPA_PURE;
        }
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
         variables.  */
      if (TREE_READONLY (t))
        return;

      if (dump_file)
        fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}
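
/* To make the rules above concrete: given "static int counter;", a read
   such as "return counter;" demotes a const candidate to pure, a write
   such as "counter++;" forces IPA_NEITHER, and any access to a volatile
   or attribute ((used)) variable likewise forces IPA_NEITHER.  */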
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   variable T is legal in a function that is either pure or const.  */

static inline void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
           && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
           && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
           && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
        fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
        fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;
    }
}
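
/* For instance, dereferencing a pointer that provably points into local
   memory, as in

     int f (int x) { int a[2] = { x, x }; int *p = a; return *p; }

   leaves the state untouched, while a dereference that may alias global
   memory is treated like a global read or write above.  */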
/* Compute state based on ECF FLAGS and store to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
                  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " looping");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
    }
}
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING better of the two variants.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
              enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
        *looping = looping2;
      else
        *looping = MIN (*looping, looping2);
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING worse of the two variants.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
             enum pure_const_state_e state2, bool looping2)
{
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
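
/* A short worked example of the two merge directions: starting from
   IPA_CONST/looping=false, calling a pure and possibly looping callee
   makes worse_state yield IPA_PURE/looping=true, while a later
   better_state against a user-declared state (state_previously_known)
   can only improve that result, never make it worse.  */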
/* Recognize special cases of builtins that are by themselves not pure or const
   but function using them is.  */

static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
                       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
        *looping = false;
        *state = IPA_CONST;
        return true;
      case BUILT_IN_PREFETCH:
        *looping = true;
        *state = IPA_CONST;
        return true;
      default:
        break;
      }
  return false;
}
/* Check the parameters of a function call to CALL_EXPR to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also affect the purity.  The CALL_EXPR node for
   the entire call expression.  */

static void
check_call (funct_state local, gimple call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
                                     && stmt_can_throw_external (call));

  if (possibly_throws)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
        if (gimple_op (call, i)
            && tree_could_throw_p (gimple_op (call, i)))
          {
            if (possibly_throws && cfun->can_throw_non_call_exceptions)
              {
                if (dump_file)
                  fprintf (dump_file, "    operand can throw; looping\n");
                local->looping = true;
              }
            if (possibly_throws_externally)
              {
                if (dump_file)
                  fprintf (dump_file, "    operand can throw externally\n");
                local->can_throw = true;
              }
          }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
        {
          worse_state (&local->pure_const_state, &local->looping,
                       call_state, call_looping);
          return;
        }
      /* When bad things happen to bad functions, they cannot be const
         or pure.  */
      if (setjmp_call_p (callee_t))
        {
          if (dump_file)
            fprintf (dump_file, "    setjmp is not const/pure\n");
          local->looping = true;
          local->pure_const_state = IPA_NEITHER;
        }

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee_t))
          {
          case BUILT_IN_LONGJMP:
          case BUILT_IN_NONLOCAL_GOTO:
            if (dump_file)
              fprintf (dump_file,
                       "    longjmp and nonlocal goto is not const/pure\n");
            local->pure_const_state = IPA_NEITHER;
            local->looping = true;
            break;
          default:
            break;
          }
    }

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
        fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  */
  else if (!ipa)
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
        }
      if (possibly_throws_externally)
        {
          if (dump_file)
            {
              fprintf (dump_file, "    can throw externally to lp %i\n",
                       lookup_stmt_eh_lp (call));
              if (callee_t)
                fprintf (dump_file, "     callee:%s\n",
                         IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
            }
          local->can_throw = true;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
                        ((flags & (ECF_NORETURN | ECF_NOTHROW))
                         == (ECF_NORETURN | ECF_NOTHROW))
                        || (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
                   call_state, call_looping);
    }
  /* Direct functions calls are handled by IPA propagation.  */
}
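
/* In local (non-IPA) mode the net effect of check_call is that the
   caller's state is merged with the callee's ECF flags through
   worse_state: for example, a const candidate calling a function merely
   declared pure ends up no better than pure, and a call through an
   unknown function pointer drops it to IPA_NEITHER.  */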
/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}
/* Look into pointer pointed to by GSIP and figure out what interesting side
   effects it has.  */

static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
        fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
                            ipa ? check_ipa_load : check_load,
                            ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
        {
          if (dump_file)
            fprintf (dump_file, "    can throw; looping\n");
          local->looping = true;
        }
      if (stmt_can_throw_external (stmt))
        {
          if (dump_file)
            fprintf (dump_file, "    can throw externally\n");
          local->can_throw = true;
        }
      else
        if (dump_file)
          fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, stmt, ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (stmt)))
        /* Target of long jump.  */
        {
          if (dump_file)
            fprintf (dump_file, "    nonlocal label is not const/pure\n");
          local->pure_const_state = IPA_NEITHER;
        }
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (stmt))
        {
          if (dump_file)
            fprintf (dump_file, "    memory asm clobber is not const/pure\n");
          /* Abandon all hope, ye who enter here. */
          local->pure_const_state = IPA_NEITHER;
        }
      if (gimple_asm_volatile_p (stmt))
        {
          if (dump_file)
            fprintf (dump_file, "    volatile is not const/pure\n");
          /* Abandon all hope, ye who enter here. */
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;
        }
      break;
    default:
      break;
    }
}
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
                    flags_from_decl_or_type (fn->decl),
                    cgraph_node_cannot_return (fn));

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      return l;
    }

  if (dump_file)
    fprintf (dump_file, "\n\n local analysis of %s\n ",
             fn->name ());

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB (this_block)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          check_stmt (&gsi, l, ipa);
          if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
            goto end;
        }
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
         indication of possible infinite loop side
         effect.  */
      if (mark_dfs_back_edges ())
        {
          /* Preheaders are needed for SCEV to work.
             Simple latches and recorded exits improve chances that loop will
             proved to be finite in testcases such as in loop-15.c.  */
          loop_optimizer_init (LOOPS_HAVE_PREHEADERS
                               | LOOPS_HAVE_SIMPLE_LATCHES
                               | LOOPS_HAVE_RECORDED_EXITS);
          if (dump_file && (dump_flags & TDF_DETAILS))
            flow_loops_dump (dump_file, NULL, 0);
          if (mark_irreducible_loops ())
            {
              if (dump_file)
                fprintf (dump_file, "    has irreducible loops\n");
              l->looping = true;
            }
          else
            {
              struct loop *loop;
              scev_initialize ();
              FOR_EACH_LOOP (loop, 0)
                if (!finite_loop_p (loop))
                  {
                    if (dump_file)
                      fprintf (dump_file, "    can not prove finiteness of "
                               "loop %i\n", loop->num);
                    l->looping = true;
                    break;
                  }
              scev_finalize ();
            }
          loop_optimizer_finalize ();
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
                l->state_previously_known,
                l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
        fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
        fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");
    }
  return l;
}
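
/* A sketch of the looping side of the analysis: a function such as

     unsigned steps (unsigned n)
     {
       unsigned s = 0;
       while (n > 1)
         {
           n = (n & 1) ? 3 * n + 1 : n / 2;
           s++;
         }
       return s;
     }

   touches no global memory, but its loop is not provably finite, so
   l->looping is set and the function can only become the looping variant
   (DECL_LOOPING_CONST_OR_PURE_P) rather than plain const.  */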
/* Called when new function is inserted to callgraph late.  */
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
    return;
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();
  if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
    set_function_state (node, analyze_function (node, true));
  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;
}
/* Called when new clone is inserted to callgraph late.  */

static void
duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
                     void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (src))
    {
      funct_state l = XNEW (struct funct_state_d);
      gcc_assert (!has_function_state (dst));
      memcpy (l, get_function_state (src), sizeof (*l));
      set_function_state (dst, l);
    }
}
/* Called when a cgraph node is removed.  */

static void
remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (node))
    {
      funct_state l = get_function_state (node);
      if (l != &varying_state)
        free (l);
      set_function_state (node, NULL);
    }
}
static void
register_hooks (void)
{
  static bool init_p = false;

  if (init_p)
    return;
  init_p = true;

  node_removal_hook_holder =
      cgraph_add_node_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
      cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
      cgraph_add_function_insertion_hook (&add_new_function, NULL);
}
/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;

  register_hooks ();

  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();

  /* Process all of the functions.

     We process AVAIL_OVERWRITABLE functions.  We can not use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
      set_function_state (node, analyze_function (node, true));

  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;
}
/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
        count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
        {
          struct bitpack_d bp;
          funct_state fs;
          int node_ref;
          lto_symtab_encoder_t encoder;

          fs = get_function_state (node);

          encoder = ob->decl_state->symtab_node_encoder;
          node_ref = lto_symtab_encoder_encode (encoder, node);
          streamer_write_uhwi_stream (ob->main_stream, node_ref);

          /* Note that flags will need to be read in the opposite
             order as we are pushing the bitflags into FLAGS.  */
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, fs->pure_const_state, 2);
          bp_pack_value (&bp, fs->state_previously_known, 2);
          bp_pack_value (&bp, fs->looping_previously_known, 1);
          bp_pack_value (&bp, fs->looping, 1);
          bp_pack_value (&bp, fs->can_throw, 1);
          streamer_write_bitpack (&bp);
        }
    }

  lto_destroy_simple_output_block (ob);
}
/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  register_hooks ();
  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
        = lto_create_simple_input_block (file_data,
                                         LTO_section_ipa_pure_const,
                                         &data, &len);
      if (ib)
        {
          unsigned int i;
          unsigned int count = streamer_read_uhwi (ib);

          for (i = 0; i < count; i++)
            {
              unsigned int index;
              struct cgraph_node *node;
              struct bitpack_d bp;
              funct_state fs;
              lto_symtab_encoder_t encoder;

              fs = XCNEW (struct funct_state_d);
              index = streamer_read_uhwi (ib);
              encoder = file_data->symtab_node_encoder;
              node = cgraph (lto_symtab_encoder_deref (encoder, index));
              set_function_state (node, fs);

              /* Note that the flags must be read in the opposite
                 order in which they were written (the bitflags were
                 pushed into FLAGS).  */
              bp = streamer_read_bitpack (ib);
              fs->pure_const_state
                        = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->state_previously_known
                        = (enum pure_const_state_e) bp_unpack_value (&bp, 2);
              fs->looping_previously_known = bp_unpack_value (&bp, 1);
              fs->looping = bp_unpack_value (&bp, 1);
              fs->can_throw = bp_unpack_value (&bp, 1);
              if (dump_file)
                {
                  int flags = flags_from_decl_or_type (node->decl);
                  fprintf (dump_file, "Read info for %s/%i ",
                           node->name (), node->order);
                  if (flags & ECF_CONST)
                    fprintf (dump_file, " const");
                  if (flags & ECF_PURE)
                    fprintf (dump_file, " pure");
                  if (flags & ECF_NOTHROW)
                    fprintf (dump_file, " nothrow");
                  fprintf (dump_file, "\n  pure const state: %s\n",
                           pure_const_names[fs->pure_const_state]);
                  fprintf (dump_file, "  previously known state: %s\n",
                           pure_const_names[fs->looping_previously_known]);
                  if (fs->looping)
                    fprintf (dump_file," function is locally looping\n");
                  if (fs->looping_previously_known)
                    fprintf (dump_file," function is previously known looping\n");
                  if (fs->can_throw)
                    fprintf (dump_file," function is locally throwing\n");
                }
            }

          lto_destroy_simple_input_block (file_data,
                                          LTO_section_ipa_pure_const,
                                          ib, data, len);
        }
    }
}
static bool
ignore_edge (struct cgraph_edge *e)
{
  return (!e->can_throw_external);
}

/* Return true if NODE is self recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (cgraph_function_node (e->callee, NULL) == node)
      return true;
  return false;
}
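
/* For example, "int f (int n) { return n ? f (n - 1) : 0; }" is self
   recursive and is recognized here, whereas two functions calling each
   other form a non-trivial SCC and are handled by the cycle walk in
   propagate_pure_const below.  */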
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static void
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false, NULL);
  if (dump_file)
    {
      dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;

      node = order[i];
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
        {
          struct cgraph_edge *e;
          struct cgraph_edge *ie;
          int i;
          struct ipa_ref *ref;

          funct_state w_l = get_function_state (w);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Visiting %s/%i state:%s looping %i\n",
                     w->name (), w->order,
                     pure_const_names[w_l->pure_const_state],
                     w_l->looping);

          /* First merge in function body properties.  */
          worse_state (&pure_const_state, &looping,
                       w_l->pure_const_state, w_l->looping);
          if (pure_const_state == IPA_NEITHER)
            break;

          /* For overwritable nodes we can not assume anything.  */
          if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
            {
              worse_state (&pure_const_state, &looping,
                           w_l->state_previously_known,
                           w_l->looping_previously_known);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "    Overwritable. state %s looping %i\n",
                         pure_const_names[w_l->state_previously_known],
                         w_l->looping_previously_known);
              break;
            }

          /* We consider recursive cycles as possibly infinite.
             This might be relaxed since infinite recursion leads to stack
             overflow.  */

          /* Now walk the edges and merge in callee properties.  */
          for (e = w->callees; e; e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "    Call to %s/%i",
                         e->callee->name (), e->callee->order);
              if (avail > AVAIL_OVERWRITABLE)
                {
                  funct_state y_l = get_function_state (y);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file,
                             " state:%s looping:%i\n",
                             pure_const_names[y_l->pure_const_state],
                             y_l->looping);
                  if (y_l->pure_const_state > IPA_PURE
                      && cgraph_edge_cannot_lead_to_return (e))
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file,
                                 "        Ignoring side effects"
                                 " -> pure, looping\n");
                      edge_state = IPA_PURE;
                      edge_looping = true;
                    }
                  else
                    {
                      edge_state = y_l->pure_const_state;
                      edge_looping = y_l->looping;
                    }
                }
              else if (special_builtin_state (&edge_state, &edge_looping,
                                              y->decl))
                ;
              else
                state_from_flags (&edge_state, &edge_looping,
                                  flags_from_decl_or_type (y->decl),
                                  cgraph_edge_cannot_lead_to_return (e));

              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          if (pure_const_state == IPA_NEITHER)
            break;

          /* Now process the indirect call.  */
          for (ie = w->indirect_calls; ie; ie = ie->next_callee)
            {
              enum pure_const_state_e edge_state = IPA_CONST;
              bool edge_looping = false;

              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "    Indirect call");
              state_from_flags (&edge_state, &edge_looping,
                                ie->indirect_info->ecf_flags,
                                cgraph_edge_cannot_lead_to_return (ie));
              /* Merge the results with what we already know.  */
              better_state (&edge_state, &edge_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           edge_state, edge_looping);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          if (pure_const_state == IPA_NEITHER)
            break;

          /* And finally all loads and stores.  */
          for (i = 0; ipa_ref_list_reference_iterate (&w->ref_list, i, ref); i++)
            {
              enum pure_const_state_e ref_state = IPA_CONST;
              bool ref_looping = false;
              switch (ref->use)
                {
                case IPA_REF_LOAD:
                  /* readonly reads are safe.  */
                  if (TREE_READONLY (ipa_ref_varpool_node (ref)->decl))
                    break;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "    nonreadonly global var read\n");
                  ref_state = IPA_PURE;
                  break;
                case IPA_REF_STORE:
                  if (ipa_ref_cannot_lead_to_return (ref))
                    break;
                  ref_state = IPA_NEITHER;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "    global var write\n");
                  break;
                case IPA_REF_ADDR:
                  break;
                }
              better_state (&ref_state, &ref_looping,
                            w_l->state_previously_known,
                            w_l->looping_previously_known);
              worse_state (&pure_const_state, &looping,
                           ref_state, ref_looping);
              if (pure_const_state == IPA_NEITHER)
                break;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Result %s looping %i\n",
                 pure_const_names[pure_const_state],
                 looping);

      /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = get_function_state (w);
          enum pure_const_state_e this_state = pure_const_state;
          bool this_looping = looping;

          if (w_l->state_previously_known != IPA_NEITHER
              && this_state > w_l->state_previously_known)
            {
              this_state = w_l->state_previously_known;
              this_looping |= w_l->looping_previously_known;
            }
          if (!this_looping && self_recursive_p (w))
            this_looping = true;
          if (!w_l->looping_previously_known)
            this_looping = false;

          /* All nodes within a cycle share the same info.  */
          w_l->pure_const_state = this_state;
          w_l->looping = this_looping;

          switch (this_state)
            {
            case IPA_CONST:
              if (!TREE_READONLY (w->decl))
                {
                  warn_function_const (w->decl, !this_looping);
                  if (dump_file)
                    fprintf (dump_file, "Function found to be %sconst: %s\n",
                             this_looping ? "looping " : "",
                             w->name ());
                }
              cgraph_set_const_flag (w, true, this_looping);
              break;

            case IPA_PURE:
              if (!DECL_PURE_P (w->decl))
                {
                  warn_function_pure (w->decl, !this_looping);
                  if (dump_file)
                    fprintf (dump_file, "Function found to be %spure: %s\n",
                             this_looping ? "looping " : "",
                             w->name ());
                }
              cgraph_set_pure_flag (w, true, this_looping);
              break;

            default:
              break;
            }
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
}
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
  if (dump_file)
    {
      dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w)
        {
          struct cgraph_edge *e, *ie;
          funct_state w_l = get_function_state (w);

          if (w_l->can_throw
              || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
            can_throw = true;

          for (e = w->callees; e; e = e->next_callee)
            {
              enum availability avail;
              struct cgraph_node *y = cgraph_function_node (e->callee, &avail);

              if (avail > AVAIL_OVERWRITABLE)
                {
                  funct_state y_l = get_function_state (y);

                  if (y_l->can_throw && !TREE_NOTHROW (w->decl)
                      && e->can_throw_external)
                    can_throw = true;
                }
              else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
                can_throw = true;
            }
          for (ie = node->indirect_calls; ie; ie = ie->next_callee)
            if (ie->can_throw_external)
              can_throw = true;
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }

      /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
      w = node;
      while (w)
        {
          funct_state w_l = get_function_state (w);
          if (!can_throw && !TREE_NOTHROW (w->decl))
            {
              cgraph_set_nothrow_flag (w, true);
              if (dump_file)
                fprintf (dump_file, "Function found to be nothrow: %s\n",
                         w->name ());
            }
          else if (can_throw && !TREE_NOTHROW (w->decl))
            w_l->can_throw = true;
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;
        }
    }

  ipa_free_postorder_info ();
  free (order);
}
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

static unsigned int
propagate (void)
{
  struct cgraph_node *node;

  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  cgraph_remove_node_removal_hook (node_removal_hook_holder);

  /* Nothrow makes more functions not lead to return and improves
     later analysis.  */
  propagate_nothrow ();
  propagate_pure_const ();

  /* Cleanup.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();
  return 0;
}
static bool
gate_pure_const (void)
{
  return (flag_ipa_pure_const
          /* Don't bother doing anything if the program has errors.  */
          && !seen_error ());
}
const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_pure_const, ctxt,
                      pure_const_generate_summary, /* generate_summary */
                      pure_const_write_summary, /* write_summary */
                      pure_const_read_summary, /* read_summary */
                      NULL, /* write_optimization_summary */
                      NULL, /* read_optimization_summary */
                      NULL, /* stmt_fixup */
                      0, /* function_transform_todo_flags_start */
                      NULL, /* function_transform */
                      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_pure_const (); }
  unsigned int execute () { return propagate (); }

}; // class pass_ipa_pure_const
ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}
/* Return true if function should be skipped for local pure const analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over whole program after early
     optimizations we must not promote functions that are called by already
     processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
        fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
    {
      if (dump_file)
        fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
      return true;
    }
  return false;
}
/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */

static unsigned int
local_pure_const (void)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_get_node (current_function_decl);
  skip = skip_function_for_local_pure_const (node);
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
    {
      warn_function_noreturn (cfun->decl);
      if (dump_file)
        fprintf (dump_file, "Function found to be noreturn: %s\n",
                 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
        node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
        {
          warn_function_const (current_function_decl, !l->looping);
          if (!skip)
            {
              cgraph_set_const_flag (node, true, l->looping);
              changed = true;
            }
          if (dump_file)
            fprintf (dump_file, "Function found to be %sconst: %s\n",
                     l->looping ? "looping " : "",
                     current_function_name ());
        }
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
               && !l->looping)
        {
          if (!skip)
            {
              cgraph_set_const_flag (node, true, false);
              changed = true;
            }
          if (dump_file)
            fprintf (dump_file, "Function found to be non-looping: %s\n",
                     current_function_name ());
        }
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
        {
          if (!skip)
            {
              cgraph_set_pure_flag (node, true, l->looping);
              changed = true;
            }
          warn_function_pure (current_function_decl, !l->looping);
          if (dump_file)
            fprintf (dump_file, "Function found to be %spure: %s\n",
                     l->looping ? "looping " : "",
                     current_function_name ());
        }
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
               && !l->looping)
        {
          if (!skip)
            {
              cgraph_set_pure_flag (node, true, false);
              changed = true;
            }
          if (dump_file)
            fprintf (dump_file, "Function found to be non-looping: %s\n",
                     current_function_name ());
        }
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      cgraph_set_nothrow_flag (node, true);
      changed = true;
      if (dump_file)
        fprintf (dump_file, "Function found to be nothrow: %s\n",
                 current_function_name ());
    }
  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}
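
/* As an example of the NORETURN discovery above: a function like

     void die (const char *msg)
     {
       fprintf (stderr, "%s\n", msg);
       exit (1);
     }

   would typically leave the exit block with no predecessors, so it is
   reported as a candidate for the noreturn attribute and
   TREE_THIS_VOLATILE is set on its declaration.  */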
const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  bool gate () { return gate_pure_const (); }
  unsigned int execute () { return local_pure_const (); }

}; // class pass_local_pure_const
gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}
/* Emit noreturn warnings.  */

static unsigned int
execute_warn_function_noreturn (void)
{
  if (!TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
    warn_function_noreturn (current_function_decl);
  return 0;
}
static bool
gate_warn_function_noreturn (void)
{
  return warn_suggest_attribute_noreturn;
}
const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_warn_function_noreturn (); }
  unsigned int execute () { return execute_warn_function_noreturn (); }

}; // class pass_warn_function_noreturn
gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}