/* Callgraph based analysis of static variables.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).

   This must be run after inlining decisions have been made, since
   otherwise the local sets will not contain information that is
   consistent with the post-inlining state.  The global sets are not
   prone to this problem since they are by definition transitive.  */
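
/* For illustration only (not part of the pass itself): a function such as

     int sq (int x) { return x * x; }

   can be marked const, because its result depends only on its arguments,
   while

     int get (const int *p) { return *p; }

   reads memory and can therefore be at most pure; anything that writes
   global memory, touches volatile storage, or calls such code is neither
   const nor pure.  */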
/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "print-tree.h"
#include "function.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-niter.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "plugin-api.h"
#include "cgraph.h"
#include "ipa-utils.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "intl.h"
#include "opts.h"
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

const char *pure_const_names[3] = {"const", "pure", "neither"};
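
/* Note (added for clarity): the enumerators above run from the strongest
   guarantee (IPA_CONST) to the weakest (IPA_NEITHER); better_state and
   worse_state below rely on this numeric order via MIN and MAX.  */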
/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What user set here; we can be always sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to cse pure and const
     calls, it is not legal to have dce get rid of the call if there
     is a possibility that the call could loop forever since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;

  /* If function can call free, munmap or otherwise make previously
     non-trapping memory accesses trapping.  */
  bool can_free;
};

/* State used when we know nothing about function.  */
static struct funct_state_d varying_state
   = { IPA_NEITHER, IPA_NEITHER, true, true, true, true };


typedef struct funct_state_d * funct_state;

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

static vec<funct_state> funct_state_vec;
static bool gate_pure_const (void);

const pass_data pass_data_ipa_pure_const =
{
  IPA_PASS, /* type */
  "pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_pure_const : public ipa_opt_pass_d
{
public:
  pass_ipa_pure_const(gcc::context *ctxt);

  /* opt_pass methods: */
  bool gate (function *) { return gate_pure_const (); }
  unsigned int execute (function *fun);

  void register_hooks (void);

private:
  bool init_p;

  /* Holders of ipa cgraph hooks: */
  struct cgraph_node_hook_list *function_insertion_hook_holder;
  struct cgraph_2node_hook_list *node_duplication_hook_holder;
  struct cgraph_node_hook_list *node_removal_hook_holder;

}; // class pass_ipa_pure_const
/* Try to guess if function body will always be visible to compiler
   when compiling the call and whether compiler will be able
   to propagate the information by itself.  */

static bool
function_always_visible_to_compiler_p (tree decl)
{
  return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
}
/* Emit suggestion about attribute ATTRIB_NAME for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  The diagnostic is
   controlled by OPTION.  WARNED_ABOUT is a hash_set<tree> unique for
   OPTION, this function may initialize it and it is always returned
   by the function.  */

static hash_set<tree> *
suggest_attribute (int option, tree decl, bool known_finite,
		   hash_set<tree> *warned_about,
		   const char * attrib_name)
{
  if (!option_enabled (option, &global_options))
    return warned_about;
  if (TREE_THIS_VOLATILE (decl)
      || (known_finite && function_always_visible_to_compiler_p (decl)))
    return warned_about;

  if (!warned_about)
    warned_about = new hash_set<tree>;
  if (warned_about->contains (decl))
    return warned_about;
  warned_about->add (decl);
  warning_at (DECL_SOURCE_LOCATION (decl),
	      option,
	      known_finite
	      ? _("function might be candidate for attribute %<%s%>")
	      : _("function might be candidate for attribute %<%s%>"
		  " if it is known to return normally"), attrib_name);
  return warned_about;
}
/* Emit suggestion about __attribute__((pure)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_pure (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
			 known_finite, warned_about, "pure");
}
/* Emit suggestion about __attribute__((const)) for DECL.  KNOWN_FINITE
   is true if the function is known to be finite.  */

static void
warn_function_const (tree decl, bool known_finite)
{
  static hash_set<tree> *warned_about;

  warned_about
    = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
			 known_finite, warned_about, "const");
}
static void
warn_function_noreturn (tree decl)
{
  static hash_set<tree> *warned_about;
  if (!lang_hooks.missing_noreturn_ok_p (decl)
      && targetm.warn_func_return (decl))
    warned_about
      = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
			   true, warned_about, "noreturn");
}
/* Return true if we have a function state for NODE.  */

static inline bool
has_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    return false;
  return funct_state_vec[node->uid] != NULL;
}
/* Return the function state from NODE.  */

static inline funct_state
get_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid
      || !funct_state_vec[node->uid])
    /* We might want to put correct previously_known state into varying.  */
    return &varying_state;
  return funct_state_vec[node->uid];
}
/* Set the function state S for NODE.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  if (!funct_state_vec.exists ()
      || funct_state_vec.length () <= (unsigned int)node->uid)
    funct_state_vec.safe_grow_cleared (node->uid + 1);
  funct_state_vec[node->uid] = s;
}
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static void
check_decl (funct_state local,
	    tree t, bool checking_write, bool ipa)
{
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile operand is not const/pure\n");
      return;
    }

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
    return;

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (DECL_PRESERVE_P (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Used static/global variable is not const/pure\n");
      return;
    }

  /* In IPA mode we are not interested in checking actual loads and stores;
     they will be processed at propagation time using ipa_ref.  */
  if (ipa)
    return;

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
  if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    static/global memory write is not const/pure\n");
      return;
    }

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
    {
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
	return; /* Read of a constant, do not change the function state.  */
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "    global memory read is not const\n");
	  /* Just a regular read.  */
	  if (local->pure_const_state == IPA_CONST)
	    local->pure_const_state = IPA_PURE;
	}
    }
  else
    {
      /* Compilation level statics can be read if they are readonly
	 variables.  */
      if (TREE_READONLY (t))
	return;

      if (dump_file)
	fprintf (dump_file, "    static memory read is not const\n");
      /* Just a regular read.  */
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
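
/* Illustrative example (not used by the pass): given

     static const int limit = 10;
     int bound;

   a read of LIMIT keeps the current function const, a read of BOUND
   demotes it to at most pure, and any write to BOUND makes it neither
   const nor pure.  */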
/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

static void
check_op (funct_state local, tree t, bool checking_write)
{
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile indirect ref is not const/pure\n");
      return;
    }
  else if (t
	   && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
	   && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
	   && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref to local memory is OK\n");
      return;
    }
  else if (checking_write)
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Indirect ref write is not const/pure\n");
      return;
    }
  else
    {
      if (dump_file)
	fprintf (dump_file, "    Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
	local->pure_const_state = IPA_PURE;
    }
}
/* Compute state based on ECF FLAGS and store to STATE and LOOPING.  */

static void
state_from_flags (enum pure_const_state_e *state, bool *looping,
		  int flags, bool cannot_lead_to_return)
{
  *looping = false;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    {
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " looping");
    }
  if (flags & ECF_CONST)
    {
      *state = IPA_CONST;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " const\n");
    }
  else if (flags & ECF_PURE)
    {
      *state = IPA_PURE;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " pure\n");
    }
  else if (cannot_lead_to_return)
    {
      *state = IPA_PURE;
      *looping = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " ignoring side effects->pure looping\n");
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " neither\n");
      *state = IPA_NEITHER;
      *looping = false;
    }
}
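
/* For instance, a call whose flags include both ECF_CONST and
   ECF_LOOPING_CONST_OR_PURE yields IPA_CONST with LOOPING set, while a
   call that cannot lead to return is treated as pure looping rather
   than as IPA_NEITHER.  */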
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING better of the two variants.
   Be sure to merge looping correctly.  IPA_NEITHER functions
   have looping 0 even if they don't have to return.  */

static inline void
better_state (enum pure_const_state_e *state, bool *looping,
	      enum pure_const_state_e state2, bool looping2)
{
  if (state2 < *state)
    {
      if (*state == IPA_NEITHER)
	*looping = looping2;
      else
	*looping = MIN (*looping, looping2);
      *state = state2;
    }
  else if (state2 != IPA_NEITHER)
    *looping = MIN (*looping, looping2);
}
/* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
   into STATE and LOOPING worse of the two variants.  */

static inline void
worse_state (enum pure_const_state_e *state, bool *looping,
	     enum pure_const_state_e state2, bool looping2)
{
  *state = MAX (*state, state2);
  *looping = MAX (*looping, looping2);
}
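
/* As a small example of the merge direction: combining a const body with
   a pure callee gives worse_state (IPA_CONST, IPA_PURE) == IPA_PURE,
   while better_state is used below to limit a computed state by what was
   already known about the function from its declaration.  */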
/* Recognize special cases of builtins that are by themselves not pure or const
   but the function using them is.  */

static bool
special_builtin_state (enum pure_const_state_e *state, bool *looping,
		       tree callee)
{
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      case BUILT_IN_RETURN:
      case BUILT_IN_UNREACHABLE:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_UNWIND_RESUME:
      case BUILT_IN_CXA_END_CLEANUP:
      case BUILT_IN_EH_COPY_VALUES:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_APPLY_ARGS:
	*looping = false;
	*state = IPA_CONST;
	return true;
      case BUILT_IN_PREFETCH:
	*looping = true;
	*state = IPA_CONST;
	return true;
      default:
	break;
      }
  return false;
}
/* Check the parameters of function call CALL to see if there are any
   references in the parameters that are not allowed for pure or const
   functions.  Also check to see if this is either an indirect call, a
   call outside the compilation unit, or has special attributes that may
   also affect the purity.  CALL is the call statement for the entire
   call expression.  */

static void
check_call (funct_state local, gcall *call, bool ipa)
{
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
				     && stmt_can_throw_external (call));

  if (possibly_throws || possibly_throws_externally)
    {
      unsigned int i;
      for (i = 0; i < gimple_num_ops (call); i++)
	if (gimple_op (call, i)
	    && tree_could_throw_p (gimple_op (call, i)))
	  {
	    if (possibly_throws && cfun->can_throw_non_call_exceptions)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw; looping\n");
		local->looping = true;
	      }
	    if (possibly_throws_externally)
	      {
		if (dump_file)
		  fprintf (dump_file, "    operand can throw externally\n");
		local->can_throw = true;
	      }
	  }
    }

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee, (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have less clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  if (callee_t)
    {
      enum pure_const_state_e call_state;
      bool call_looping;

      if (gimple_call_builtin_p (call, BUILT_IN_NORMAL)
	  && !nonfreeing_call_p (call))
	local->can_free = true;

      if (special_builtin_state (&call_state, &call_looping, callee_t))
	{
	  worse_state (&local->pure_const_state, &local->looping,
		       call_state, call_looping);
	  return;
	}
      /* When bad things happen to bad functions, they cannot be const
	 or pure.  */
      if (setjmp_call_p (callee_t))
	{
	  if (dump_file)
	    fprintf (dump_file, "    setjmp is not const/pure\n");
	  local->looping = true;
	  local->pure_const_state = IPA_NEITHER;
	}

      if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee_t))
	  {
	  case BUILT_IN_LONGJMP:
	  case BUILT_IN_NONLOCAL_GOTO:
	    if (dump_file)
	      fprintf (dump_file, "    longjmp and nonlocal goto is not const/pure\n");
	    local->pure_const_state = IPA_NEITHER;
	    local->looping = true;
	    break;
	  default:
	    break;
	  }
    }
  else if (gimple_call_internal_p (call) && !nonfreeing_call_p (call))
    local->can_free = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t
      && recursive_call_p (current_function_decl, callee_t))
    {
      if (dump_file)
	fprintf (dump_file, "    Recursive call can loop.\n");
      local->looping = true;
    }
  /* Either callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis of
     those bits.  */
  else if (!ipa)
    {
      enum pure_const_state_e call_state;
      bool call_looping;
      if (possibly_throws && cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (possibly_throws_externally)
	{
	  if (dump_file)
	    {
	      fprintf (dump_file, "    can throw externally to lp %i\n",
		       lookup_stmt_eh_lp (call));
	      if (callee_t)
		fprintf (dump_file, "     callee:%s\n",
			 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
	    }
	  local->can_throw = true;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "    checking flags for call:");
      state_from_flags (&call_state, &call_looping, flags,
			((flags & (ECF_NORETURN | ECF_NOTHROW))
			 == (ECF_NORETURN | ECF_NOTHROW))
			|| (!flag_exceptions && (flags & ECF_NORETURN)));
      worse_state (&local->pure_const_state, &local->looping,
		   call_state, call_looping);
    }
  /* Direct function calls are handled by IPA propagation.  */
}
/* Wrapper around check_decl for loads in local mode.  */

static bool
check_load (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, false);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in local mode.  */

static bool
check_store (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, false);
  else
    check_op ((funct_state)data, op, true);
  return false;
}

/* Wrapper around check_decl for loads in ipa mode.  */

static bool
check_ipa_load (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, false, true);
  else
    check_op ((funct_state)data, op, false);
  return false;
}

/* Wrapper around check_decl for stores in ipa mode.  */

static bool
check_ipa_store (gimple, tree op, tree, void *data)
{
  if (DECL_P (op))
    check_decl ((funct_state)data, op, true, true);
  else
    check_op ((funct_state)data, op, true);
  return false;
}
/* Look into pointer pointed to by GSIP and figure out what interesting side
   effects it has.  */

static void
check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
{
  gimple stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))
    return;

  if (dump_file)
    {
      fprintf (dump_file, "  scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  if (gimple_has_volatile_ops (stmt)
      && !gimple_clobber_p (stmt))
    {
      local->pure_const_state = IPA_NEITHER;
      if (dump_file)
	fprintf (dump_file, "    Volatile stmt is not const/pure\n");
    }

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local,
			    ipa ? check_ipa_load : check_load,
			    ipa ? check_ipa_store : check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
    {
      if (cfun->can_throw_non_call_exceptions)
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw; looping\n");
	  local->looping = true;
	}
      if (stmt_can_throw_external (stmt))
	{
	  if (dump_file)
	    fprintf (dump_file, "    can throw externally\n");
	  local->can_throw = true;
	}
      else if (dump_file)
	fprintf (dump_file, "    can throw\n");
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      check_call (local, as_a <gcall *> (stmt), ipa);
      break;
    case GIMPLE_LABEL:
      if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
	/* Target of long jump.  */
	{
	  if (dump_file)
	    fprintf (dump_file, "    nonlocal label is not const/pure\n");
	  local->pure_const_state = IPA_NEITHER;
	}
      break;
    case GIMPLE_ASM:
      if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    memory asm clobber is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->can_free = true;
	}
      if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
	{
	  if (dump_file)
	    fprintf (dump_file, "    volatile is not const/pure\n");
	  /* Abandon all hope, ye who enter here.  */
	  local->pure_const_state = IPA_NEITHER;
	  local->looping = true;
	  local->can_free = true;
	}
      break;
    default:
      break;
    }
}
/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

static funct_state
analyze_function (struct cgraph_node *fn, bool ipa)
{
  tree decl = fn->decl;
  funct_state l;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->looping = false;
  l->can_throw = false;
  l->can_free = false;
  state_from_flags (&l->state_previously_known, &l->looping_previously_known,
		    flags_from_decl_or_type (fn->decl),
		    fn->cannot_return_p ());

  if (fn->thunk.thunk_p || fn->alias)
    {
      /* Thunk gets propagated through, so nothing interesting happens.  */
      gcc_assert (ipa);
      if (fn->thunk.thunk_p && fn->thunk.virtual_offset_p)
	l->pure_const_state = IPA_NEITHER;
      return l;
    }

  if (dump_file)
    fprintf (dump_file, "\n\n local analysis of %s\n ",
	     fn->name ());

  push_cfun (DECL_STRUCT_FUNCTION (decl));

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  check_stmt (&gsi, l, ipa);
	  if (l->pure_const_state == IPA_NEITHER
	      && l->looping
	      && l->can_throw
	      && l->can_free)
	    goto end;
	}
    }

end:
  if (l->pure_const_state != IPA_NEITHER)
    {
      /* Const functions cannot have back edges (an
	 indication of possible infinite loop side
	 effect).  */
      if (mark_dfs_back_edges ())
	{
	  /* Preheaders are needed for SCEV to work.
	     Simple latches and recorded exits improve chances that loop will
	     be proved to be finite in testcases such as in loop-15.c.  */
	  loop_optimizer_init (LOOPS_HAVE_PREHEADERS
			       | LOOPS_HAVE_SIMPLE_LATCHES
			       | LOOPS_HAVE_RECORDED_EXITS);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    flow_loops_dump (dump_file, NULL, 0);
	  if (mark_irreducible_loops ())
	    {
	      if (dump_file)
		fprintf (dump_file, "    has irreducible loops\n");
	      l->looping = true;
	    }
	  else
	    {
	      struct loop *loop;
	      scev_initialize ();
	      FOR_EACH_LOOP (loop, 0)
		if (!finite_loop_p (loop))
		  {
		    if (dump_file)
		      fprintf (dump_file, "    can not prove finiteness of "
			       "loop %i\n", loop->num);
		    l->looping = true;
		    break;
		  }
	      scev_finalize ();
	    }
	  loop_optimizer_finalize ();
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "    checking previously known:");

  better_state (&l->pure_const_state, &l->looping,
		l->state_previously_known,
		l->looping_previously_known);
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  pop_cfun ();
  if (dump_file)
    {
      if (l->looping)
	fprintf (dump_file, "Function is locally looping.\n");
      if (l->can_throw)
	fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
	fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
	fprintf (dump_file, "Function is locally pure.\n");
      if (l->can_free)
	fprintf (dump_file, "Function can locally free.\n");
    }
  return l;
}
/* Called when new function is inserted to callgraph late.  */

static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->get_availability () < AVAIL_INTERPOSABLE)
    return;
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  if (node->get_availability () > AVAIL_INTERPOSABLE
      && opt_for_fn (node->decl, flag_ipa_pure_const))
    set_function_state (node, analyze_function (node, true));
}
/* Called when new clone is inserted to callgraph late.  */

static void
duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
		     void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (src))
    {
      funct_state l = XNEW (struct funct_state_d);
      gcc_assert (!has_function_state (dst));
      memcpy (l, get_function_state (src), sizeof (*l));
      set_function_state (dst, l);
    }
}
/* Called when node is removed from callgraph.  */

static void
remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (has_function_state (node))
    {
      funct_state l = get_function_state (node);
      if (l != &varying_state)
	free (l);
      set_function_state (node, NULL);
    }
}
void
pass_ipa_pure_const::
register_hooks (void)
{
  if (init_p)
    return;

  init_p = true;

  node_removal_hook_holder =
      symtab->add_cgraph_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
      symtab->add_cgraph_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&add_new_function, NULL);
}
/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST.  */

static void
pure_const_generate_summary (void)
{
  struct cgraph_node *node;
  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  /* Process all of the functions.

     We process AVAIL_INTERPOSABLE functions.  We can not use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when function got cloned and the clone is AVAILABLE.  */

  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->get_availability () >= AVAIL_INTERPOSABLE
	&& opt_for_fn (node->decl, flag_ipa_pure_const))
      set_function_state (node, analyze_function (node, true));
}
/* Serialize the ipa info for lto.  */

static void
pure_const_write_summary (void)
{
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  encoder = lto_get_out_decl_state ()->symtab_node_encoder;

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	count++;
    }

  streamer_write_uhwi_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->definition && has_function_state (node))
	{
	  struct bitpack_d bp;
	  funct_state fs;
	  int node_ref;
	  lto_symtab_encoder_t encoder;

	  fs = get_function_state (node);

	  encoder = ob->decl_state->symtab_node_encoder;
	  node_ref = lto_symtab_encoder_encode (encoder, node);
	  streamer_write_uhwi_stream (ob->main_stream, node_ref);

	  /* Note that flags will need to be read in the opposite
	     order as we are pushing the bitflags into FLAGS.  */
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, fs->pure_const_state, 2);
	  bp_pack_value (&bp, fs->state_previously_known, 2);
	  bp_pack_value (&bp, fs->looping_previously_known, 1);
	  bp_pack_value (&bp, fs->looping, 1);
	  bp_pack_value (&bp, fs->can_throw, 1);
	  bp_pack_value (&bp, fs->can_free, 1);
	  streamer_write_bitpack (&bp);
	}
    }

  lto_destroy_simple_output_block (ob);
}
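
/* For reference, each record written above is one node index followed by a
   bitpack laid out as: pure_const_state (2 bits), state_previously_known
   (2 bits), looping_previously_known, looping, can_throw and can_free
   (1 bit each); pure_const_read_summary below unpacks them in the same
   order.  */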
/* Deserialize the ipa info for lto.  */

static void
pure_const_read_summary (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  pass_ipa_pure_const *pass = static_cast <pass_ipa_pure_const *> (current_pass);
  pass->register_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib
	= lto_create_simple_input_block (file_data,
					 LTO_section_ipa_pure_const,
					 &data, &len);
      if (ib)
	{
	  unsigned int i;
	  unsigned int count = streamer_read_uhwi (ib);

	  for (i = 0; i < count; i++)
	    {
	      unsigned int index;
	      struct cgraph_node *node;
	      struct bitpack_d bp;
	      funct_state fs;
	      lto_symtab_encoder_t encoder;

	      fs = XCNEW (struct funct_state_d);
	      index = streamer_read_uhwi (ib);
	      encoder = file_data->symtab_node_encoder;
	      node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder,
									  index));
	      set_function_state (node, fs);

	      /* Note that the flags must be read in the opposite
		 order in which they were written (the bitflags were
		 pushed into FLAGS).  */
	      bp = streamer_read_bitpack (ib);
	      fs->pure_const_state
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->state_previously_known
			= (enum pure_const_state_e) bp_unpack_value (&bp, 2);
	      fs->looping_previously_known = bp_unpack_value (&bp, 1);
	      fs->looping = bp_unpack_value (&bp, 1);
	      fs->can_throw = bp_unpack_value (&bp, 1);
	      fs->can_free = bp_unpack_value (&bp, 1);

	      if (dump_file)
		{
		  int flags = flags_from_decl_or_type (node->decl);
		  fprintf (dump_file, "Read info for %s/%i ",
			   node->name (),
			   node->order);
		  if (flags & ECF_CONST)
		    fprintf (dump_file, " const");
		  if (flags & ECF_PURE)
		    fprintf (dump_file, " pure");
		  if (flags & ECF_NOTHROW)
		    fprintf (dump_file, " nothrow");
		  fprintf (dump_file, "\n  pure const state: %s\n",
			   pure_const_names[fs->pure_const_state]);
		  fprintf (dump_file, "  previously known state: %s\n",
			   pure_const_names[fs->state_previously_known]);
		  if (fs->looping)
		    fprintf (dump_file," function is locally looping\n");
		  if (fs->looping_previously_known)
		    fprintf (dump_file," function is previously known looping\n");
		  if (fs->can_throw)
		    fprintf (dump_file," function is locally throwing\n");
		  if (fs->can_free)
		    fprintf (dump_file," function can locally free\n");
		}
	    }

	  lto_destroy_simple_input_block (file_data,
					  LTO_section_ipa_pure_const,
					  ib, data, len);
	}
    }
}
static bool
ignore_edge (struct cgraph_edge *e)
{
  return (!e->can_throw_external);
}
/* Return true if NODE is self recursive function.
   Indirectly recursive functions appear as non-trivial strongly
   connected components, so we need to care about self recursion
   only.  */

static bool
self_recursive_p (struct cgraph_node *node)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->function_symbol () == node)
      return true;
  return false;
}
/* Return true if N is cdtor that is not const or pure.  In this case we may
   need to remove unreachable function if it is marked const/pure.  */

static bool
cdtor_p (cgraph_node *n, void *)
{
  if (DECL_STATIC_CONSTRUCTOR (n->decl) || DECL_STATIC_DESTRUCTOR (n->decl))
    return !TREE_READONLY (n->decl) && !DECL_PURE_P (n->decl);
  return false;
}
/* Produce transitive closure over the callgraph and compute pure/const
   attributes.  */

static bool
propagate_pure_const (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;
  bool remove_p = false;

  order_pos = ipa_reduced_postorder (order, true, false, NULL);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
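  /* For example, if functions A, B and C form one strongly connected
     component and only B writes a global variable, the whole cycle is
     forced to IPA_NEITHER; if the worst member is merely pure, every
     member of the cycle ends up marked pure (and looping, since
     recursive cycles are treated as possibly infinite).  */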
  for (i = 0; i < order_pos; i++ )
    {
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;
      int count = 0;
      node = order[i];

      if (node->alias)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting cycle\n");

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && pure_const_state != IPA_NEITHER)
	{
	  struct cgraph_edge *e;
	  struct cgraph_edge *ie;
	  int i;
	  struct ipa_ref *ref = NULL;

	  funct_state w_l = get_function_state (w);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Visiting %s/%i state:%s looping %i\n",
		     w->name (),
		     w->order,
		     pure_const_names[w_l->pure_const_state],
		     w_l->looping);

	  /* First merge in function body properties.  */
	  worse_state (&pure_const_state, &looping,
		       w_l->pure_const_state, w_l->looping);
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* For overwritable nodes we can not assume anything.  */
	  if (w->get_availability () == AVAIL_INTERPOSABLE)
	    {
	      worse_state (&pure_const_state, &looping,
			   w_l->state_previously_known,
			   w_l->looping_previously_known);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "    Overwritable. state %s looping %i\n",
			 pure_const_names[w_l->state_previously_known],
			 w_l->looping_previously_known);
	      break;
	    }

	  count++;

	  /* We consider recursive cycles as possibly infinite.
	     This might be relaxed since infinite recursion leads to stack
	     overflow.  */
	  if (count > 1)
	    looping = true;

	  /* Now walk the edges and merge in callee properties.  */
	  for (e = w->callees; e; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail);
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Call to %s/%i",
			 e->callee->name (),
			 e->callee->order);
	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = get_function_state (y);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     " state:%s looping:%i\n",
			     pure_const_names[y_l->pure_const_state],
			     y_l->looping);
		  if (y_l->pure_const_state > IPA_PURE
		      && e->cannot_lead_to_return_p ())
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file,
				 "        Ignoring side effects"
				 " -> pure, looping\n");
		      edge_state = IPA_PURE;
		      edge_looping = true;
		    }
		  else
		    {
		      edge_state = y_l->pure_const_state;
		      edge_looping = y_l->looping;
		    }
		}
	      else if (special_builtin_state (&edge_state, &edge_looping,
					      y->decl))
		;
	      else
		state_from_flags (&edge_state, &edge_looping,
				  flags_from_decl_or_type (y->decl),
				  e->cannot_lead_to_return_p ());

	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* Now process the indirect call.  */
	  for (ie = w->indirect_calls; ie; ie = ie->next_callee)
	    {
	      enum pure_const_state_e edge_state = IPA_CONST;
	      bool edge_looping = false;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "    Indirect call");
	      state_from_flags (&edge_state, &edge_looping,
				ie->indirect_info->ecf_flags,
				ie->cannot_lead_to_return_p ());
	      /* Merge the results with what we already know.  */
	      better_state (&edge_state, &edge_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   edge_state, edge_looping);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  if (pure_const_state == IPA_NEITHER)
	    break;

	  /* And finally all loads and stores.  */
	  for (i = 0; w->iterate_reference (i, ref); i++)
	    {
	      enum pure_const_state_e ref_state = IPA_CONST;
	      bool ref_looping = false;
	      switch (ref->use)
		{
		case IPA_REF_LOAD:
		  /* readonly reads are safe.  */
		  if (TREE_READONLY (ref->referred->decl))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    nonreadonly global var read\n");
		  ref_state = IPA_PURE;
		  break;
		case IPA_REF_STORE:
		  if (ref->cannot_lead_to_return ())
		    break;
		  ref_state = IPA_NEITHER;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "    global var write\n");
		  break;
		case IPA_REF_ADDR:
		  break;
		default:
		  break;
		}
	      better_state (&ref_state, &ref_looping,
			    w_l->state_previously_known,
			    w_l->looping_previously_known);
	      worse_state (&pure_const_state, &looping,
			   ref_state, ref_looping);
	      if (pure_const_state == IPA_NEITHER)
		break;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Result %s looping %i\n",
		 pure_const_names [pure_const_state],
		 looping);

      /* Find the worst state of can_free for any node in the cycle.  */
      bool can_free = false;
      w = node;
      while (w && !can_free)
	{
	  struct cgraph_edge *e;
	  funct_state w_l = get_function_state (w);

	  if (w_l->can_free
	      || w->get_availability () == AVAIL_INTERPOSABLE
	      || w->indirect_calls)
	    can_free = true;

	  for (e = w->callees; e && !can_free; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail);

	      if (avail > AVAIL_INTERPOSABLE)
		can_free = get_function_state (y)->can_free;
	      else
		can_free = true;
	    }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  enum pure_const_state_e this_state = pure_const_state;
	  bool this_looping = looping;

	  w_l->can_free = can_free;
	  w->nonfreeing_fn = !can_free;
	  if (!can_free && dump_file)
	    fprintf (dump_file, "Function found not to call free: %s\n",
		     w->name ());

	  if (w_l->state_previously_known != IPA_NEITHER
	      && this_state > w_l->state_previously_known)
	    {
	      this_state = w_l->state_previously_known;
	      this_looping |= w_l->looping_previously_known;
	    }
	  if (!this_looping && self_recursive_p (w))
	    this_looping = true;
	  if (!w_l->looping_previously_known)
	    this_looping = false;

	  /* All nodes within a cycle share the same info.  */
	  w_l->pure_const_state = this_state;
	  w_l->looping = this_looping;

	  /* Inline clones share declaration with their offline copies;
	     do not modify their declarations since the offline copy may
	     be different.  */
	  if (!w->global.inlined_to)
	    switch (this_state)
	      {
	      case IPA_CONST:
		if (!TREE_READONLY (w->decl))
		  {
		    warn_function_const (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %sconst: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
							    NULL, true);
		w->set_const_flag (true, this_looping);
		break;

	      case IPA_PURE:
		if (!DECL_PURE_P (w->decl))
		  {
		    warn_function_pure (w->decl, !this_looping);
		    if (dump_file)
		      fprintf (dump_file, "Function found to be %spure: %s\n",
			       this_looping ? "looping " : "",
			       w->name ());
		  }
		remove_p |= w->call_for_symbol_and_aliases (cdtor_p,
							    NULL, true);
		w->set_pure_flag (true, this_looping);
		break;

	      default:
		break;
	      }
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
  return remove_p;
}
/* Produce transitive closure over the callgraph and compute nothrow
   attributes.  */

static void
propagate_nothrow (void)
{
  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
  int order_pos;
  int i;
  struct ipa_dfs_info * w_info;

  order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
  if (dump_file)
    {
      cgraph_node::dump_cgraph (dump_file);
      ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
    }

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++ )
    {
      bool can_throw = false;
      node = order[i];

      if (node->alias)
	continue;

      /* Find the worst state for any node in the cycle.  */
      w = node;
      while (w && !can_throw)
	{
	  struct cgraph_edge *e, *ie;
	  funct_state w_l = get_function_state (w);

	  if (w_l->can_throw
	      || w->get_availability () == AVAIL_INTERPOSABLE)
	    can_throw = true;

	  for (e = w->callees; e && !can_throw; e = e->next_callee)
	    {
	      enum availability avail;
	      struct cgraph_node *y = e->callee->
				function_or_virtual_thunk_symbol (&avail);

	      if (avail > AVAIL_INTERPOSABLE)
		{
		  funct_state y_l = get_function_state (y);

		  if (y_l->can_throw && !TREE_NOTHROW (w->decl)
		      && e->can_throw_external)
		    can_throw = true;
		}
	      else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
		can_throw = true;
	    }
	  for (ie = w->indirect_calls; ie && !can_throw; ie = ie->next_callee)
	    if (ie->can_throw_external)
	      can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}

      /* Copy back the region's pure_const_state which is shared by
	 all nodes in the region.  */
      w = node;
      while (w)
	{
	  funct_state w_l = get_function_state (w);
	  if (!can_throw && !TREE_NOTHROW (w->decl))
	    {
	      /* Inline clones share declaration with their offline copies;
		 do not modify their declarations since the offline copy may
		 be different.  */
	      if (!w->global.inlined_to)
		{
		  w->set_nothrow_flag (true);
		  if (dump_file)
		    fprintf (dump_file, "Function found to be nothrow: %s\n",
			     w->name ());
		}
	    }
	  else if (can_throw && !TREE_NOTHROW (w->decl))
	    w_l->can_throw = true;
	  w_info = (struct ipa_dfs_info *) w->aux;
	  w = w_info->next_cycle;
	}
    }

  ipa_free_postorder_info ();
  free (order);
}
/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.  */

unsigned int
pass_ipa_pure_const::
execute (function *)
{
  struct cgraph_node *node;
  bool remove_p;

  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);

  /* Nothrow makes more functions not lead to return, which improves
     the later analysis.  */
  propagate_nothrow ();
  remove_p = propagate_pure_const ();

  /* Cleanup.  */
  FOR_EACH_FUNCTION (node)
    if (has_function_state (node))
      free (get_function_state (node));
  funct_state_vec.release ();
  return remove_p ? TODO_remove_functions : 0;
}
static bool
gate_pure_const (void)
{
  return flag_ipa_pure_const || in_lto_p;
}
pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
    : ipa_opt_pass_d(pass_data_ipa_pure_const, ctxt,
		     pure_const_generate_summary, /* generate_summary */
		     pure_const_write_summary, /* write_summary */
		     pure_const_read_summary, /* read_summary */
		     NULL, /* write_optimization_summary */
		     NULL, /* read_optimization_summary */
		     NULL, /* stmt_fixup */
		     0, /* function_transform_todo_flags_start */
		     NULL, /* function_transform */
		     NULL), /* variable_transform */
  init_p(false),
  function_insertion_hook_holder(NULL),
  node_duplication_hook_holder(NULL),
  node_removal_hook_holder(NULL)
{
}

ipa_opt_pass_d *
make_pass_ipa_pure_const (gcc::context *ctxt)
{
  return new pass_ipa_pure_const (ctxt);
}
/* Return true if function should be skipped for local pure const analysis.  */

static bool
skip_function_for_local_pure_const (struct cgraph_node *node)
{
  /* Because we do not schedule pass_fixup_cfg over whole program after early
     optimizations we must not promote functions that are called by already
     processed functions.  */

  if (function_called_by_processed_nodes_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
      return true;
    }
  if (node->get_availability () <= AVAIL_INTERPOSABLE)
    {
      if (dump_file)
	fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
      return true;
    }
  return false;
}
/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in early optimization pass queue.  */

const pass_data pass_data_local_pure_const =
{
  GIMPLE_PASS, /* type */
  "local-pure-const", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_PURE_CONST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_local_pure_const : public gimple_opt_pass
{
public:
  pass_local_pure_const (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_local_pure_const, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_local_pure_const (m_ctxt); }
  virtual bool gate (function *) { return gate_pure_const (); }
  virtual unsigned int execute (function *);

}; // class pass_local_pure_const
unsigned int
pass_local_pure_const::execute (function *fun)
{
  bool changed = false;
  funct_state l;
  bool skip;
  struct cgraph_node *node;

  node = cgraph_node::get (current_function_decl);
  skip = skip_function_for_local_pure_const (node);
  if (!warn_suggest_attribute_const
      && !warn_suggest_attribute_pure
      && skip)
    return 0;

  l = analyze_function (node, false);

  /* Do NORETURN discovery.  */
  if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
    {
      warn_function_noreturn (fun->decl);
      if (dump_file)
	fprintf (dump_file, "Function found to be noreturn: %s\n",
		 current_function_name ());

      /* Update declaration and reduce profile to executed once.  */
      TREE_THIS_VOLATILE (current_function_decl) = 1;
      if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
	node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;

      changed = true;
    }

  switch (l->pure_const_state)
    {
    case IPA_CONST:
      if (!TREE_READONLY (current_function_decl))
	{
	  warn_function_const (current_function_decl, !l->looping);
	  if (!skip)
	    {
	      node->set_const_flag (true, l->looping);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %sconst: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (!skip)
	    {
	      node->set_const_flag (true, false);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      break;

    case IPA_PURE:
      if (!DECL_PURE_P (current_function_decl))
	{
	  if (!skip)
	    {
	      node->set_pure_flag (true, l->looping);
	      changed = true;
	    }
	  warn_function_pure (current_function_decl, !l->looping);
	  if (dump_file)
	    fprintf (dump_file, "Function found to be %spure: %s\n",
		     l->looping ? "looping " : "",
		     current_function_name ());
	}
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
	       && !l->looping)
	{
	  if (!skip)
	    {
	      node->set_pure_flag (true, false);
	      changed = true;
	    }
	  if (dump_file)
	    fprintf (dump_file, "Function found to be non-looping: %s\n",
		     current_function_name ());
	}
      break;

    default:
      break;
    }
  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
    {
      node->set_nothrow_flag (true);
      changed = true;
      if (dump_file)
	fprintf (dump_file, "Function found to be nothrow: %s\n",
		 current_function_name ());
    }
  free (l);
  if (changed)
    return execute_fixup_cfg ();
  else
    return 0;
}

gimple_opt_pass *
make_pass_local_pure_const (gcc::context *ctxt)
{
  return new pass_local_pure_const (ctxt);
}
/* Emit noreturn warnings.  */

const pass_data pass_data_warn_function_noreturn =
{
  GIMPLE_PASS, /* type */
  "*warn_function_noreturn", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_noreturn : public gimple_opt_pass
{
public:
  pass_warn_function_noreturn (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_noreturn, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return warn_suggest_attribute_noreturn; }
  virtual unsigned int execute (function *fun)
    {
      if (!TREE_THIS_VOLATILE (current_function_decl)
	  && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) == 0)
	warn_function_noreturn (current_function_decl);
      return 0;
    }

}; // class pass_warn_function_noreturn

gimple_opt_pass *
make_pass_warn_function_noreturn (gcc::context *ctxt)
{
  return new pass_warn_function_noreturn (ctxt);
}