1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file marks functions as being either const (TREE_READONLY) or
22 pure (DECL_PURE_P). It can also set a variant of these that
23 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
25 This must be run after inlining decisions have been made since
26 otherwise, the local sets will not contain information that is
27 consistent with post inlined state. The global sets are not prone
28 to this problem since they are by definition transitive. */
30 /* The code in this module is called by the ipa pass manager. It
31 should be one of the later passes since it's information is used by
32 the rest of the compilation. */
36 #include "coretypes.h"
41 #include "fold-const.h"
42 #include "print-tree.h"
45 #include "hard-reg-set.h"
47 #include "dominance.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
54 #include "gimple-expr.h"
56 #include "gimple-iterator.h"
57 #include "gimple-walk.h"
59 #include "tree-ssa-loop-niter.h"
60 #include "tree-inline.h"
61 #include "tree-pass.h"
62 #include "langhooks.h"
63 #include "plugin-api.h"
66 #include "ipa-utils.h"
68 #include "diagnostic.h"
69 #include "gimple-pretty-print.h"
70 #include "langhooks.h"
72 #include "lto-streamer.h"
73 #include "data-streamer.h"
74 #include "tree-streamer.h"
76 #include "tree-scalar-evolution.h"
/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  The ordering matters: the lattice only ever gets
   worse (larger), so MIN/MAX on these values implement meet/join.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Human-readable names, indexed by pure_const_state_e.  */
const char *pure_const_names[3] = {"const", "pure", "neither"};
93 /* Holder for the const_state. There is one of these per function
98 enum pure_const_state_e pure_const_state
;
99 /* What user set here; we can be always sure about this. */
100 enum pure_const_state_e state_previously_known
;
101 bool looping_previously_known
;
103 /* True if the function could possibly infinite loop. There are a
104 lot of ways that this could be determined. We are pretty
105 conservative here. While it is possible to cse pure and const
106 calls, it is not legal to have dce get rid of the call if there
107 is a possibility that the call could infinite loop since this is
108 a behavioral change. */
113 /* If function can call free, munmap or otherwise make previously
114 non-trapping memory accesses trapping. */
118 /* State used when we know nothing about function. */
119 static struct funct_state_d varying_state
120 = { IPA_NEITHER
, IPA_NEITHER
, true, true, true, true };
123 typedef struct funct_state_d
* funct_state
;
125 /* The storage of the funct_state is abstracted because there is the
126 possibility that it may be desirable to move this to the cgraph
129 /* Array, indexed by cgraph node uid, of function states. */
131 static vec
<funct_state
> funct_state_vec
;
133 static bool gate_pure_const (void);
137 const pass_data pass_data_ipa_pure_const
=
140 "pure-const", /* name */
141 OPTGROUP_NONE
, /* optinfo_flags */
142 TV_IPA_PURE_CONST
, /* tv_id */
143 0, /* properties_required */
144 0, /* properties_provided */
145 0, /* properties_destroyed */
146 0, /* todo_flags_start */
147 0, /* todo_flags_finish */
150 class pass_ipa_pure_const
: public ipa_opt_pass_d
153 pass_ipa_pure_const(gcc::context
*ctxt
);
155 /* opt_pass methods: */
156 bool gate (function
*) { return gate_pure_const (); }
157 unsigned int execute (function
*fun
);
159 void register_hooks (void);
164 /* Holders of ipa cgraph hooks: */
165 struct cgraph_node_hook_list
*function_insertion_hook_holder
;
166 struct cgraph_2node_hook_list
*node_duplication_hook_holder
;
167 struct cgraph_node_hook_list
*node_removal_hook_holder
;
169 }; // class pass_ipa_pure_const
173 /* Try to guess if function body will always be visible to compiler
174 when compiling the call and whether compiler will be able
175 to propagate the information by itself. */
178 function_always_visible_to_compiler_p (tree decl
)
180 return (!TREE_PUBLIC (decl
) || DECL_DECLARED_INLINE_P (decl
));
183 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
184 is true if the function is known to be finite. The diagnostic is
185 controlled by OPTION. WARNED_ABOUT is a hash_set<tree> unique for
186 OPTION, this function may initialize it and it is always returned
189 static hash_set
<tree
> *
190 suggest_attribute (int option
, tree decl
, bool known_finite
,
191 hash_set
<tree
> *warned_about
,
192 const char * attrib_name
)
194 if (!option_enabled (option
, &global_options
))
196 if (TREE_THIS_VOLATILE (decl
)
197 || (known_finite
&& function_always_visible_to_compiler_p (decl
)))
201 warned_about
= new hash_set
<tree
>;
202 if (warned_about
->contains (decl
))
204 warned_about
->add (decl
);
205 warning_at (DECL_SOURCE_LOCATION (decl
),
208 ? _("function might be candidate for attribute %<%s%>")
209 : _("function might be candidate for attribute %<%s%>"
210 " if it is known to return normally"), attrib_name
);
214 /* Emit suggestion about __attribute_((pure)) for DECL. KNOWN_FINITE
215 is true if the function is known to be finite. */
218 warn_function_pure (tree decl
, bool known_finite
)
220 static hash_set
<tree
> *warned_about
;
223 = suggest_attribute (OPT_Wsuggest_attribute_pure
, decl
,
224 known_finite
, warned_about
, "pure");
227 /* Emit suggestion about __attribute_((const)) for DECL. KNOWN_FINITE
228 is true if the function is known to be finite. */
231 warn_function_const (tree decl
, bool known_finite
)
233 static hash_set
<tree
> *warned_about
;
235 = suggest_attribute (OPT_Wsuggest_attribute_const
, decl
,
236 known_finite
, warned_about
, "const");
240 warn_function_noreturn (tree decl
)
242 static hash_set
<tree
> *warned_about
;
243 if (!lang_hooks
.missing_noreturn_ok_p (decl
)
244 && targetm
.warn_func_return (decl
))
246 = suggest_attribute (OPT_Wsuggest_attribute_noreturn
, decl
,
247 true, warned_about
, "noreturn");
250 /* Return true if we have a function state for NODE. */
253 has_function_state (struct cgraph_node
*node
)
255 if (!funct_state_vec
.exists ()
256 || funct_state_vec
.length () <= (unsigned int)node
->uid
)
258 return funct_state_vec
[node
->uid
] != NULL
;
261 /* Return the function state from NODE. */
263 static inline funct_state
264 get_function_state (struct cgraph_node
*node
)
266 if (!funct_state_vec
.exists ()
267 || funct_state_vec
.length () <= (unsigned int)node
->uid
268 || !funct_state_vec
[node
->uid
])
269 /* We might want to put correct previously_known state into varying. */
270 return &varying_state
;
271 return funct_state_vec
[node
->uid
];
274 /* Set the function state S for NODE. */
277 set_function_state (struct cgraph_node
*node
, funct_state s
)
279 if (!funct_state_vec
.exists ()
280 || funct_state_vec
.length () <= (unsigned int)node
->uid
)
281 funct_state_vec
.safe_grow_cleared (node
->uid
+ 1);
282 funct_state_vec
[node
->uid
] = s
;
285 /* Check to see if the use (or definition when CHECKING_WRITE is true)
286 variable T is legal in a function that is either pure or const. */
289 check_decl (funct_state local
,
290 tree t
, bool checking_write
, bool ipa
)
292 /* Do not want to do anything with volatile except mark any
293 function that uses one to be not const or pure. */
294 if (TREE_THIS_VOLATILE (t
))
296 local
->pure_const_state
= IPA_NEITHER
;
298 fprintf (dump_file
, " Volatile operand is not const/pure");
302 /* Do not care about a local automatic that is not static. */
303 if (!TREE_STATIC (t
) && !DECL_EXTERNAL (t
))
306 /* If the variable has the "used" attribute, treat it as if it had a
307 been touched by the devil. */
308 if (DECL_PRESERVE_P (t
))
310 local
->pure_const_state
= IPA_NEITHER
;
312 fprintf (dump_file
, " Used static/global variable is not const/pure\n");
316 /* In IPA mode we are not interested in checking actual loads and stores;
317 they will be processed at propagation time using ipa_ref. */
321 /* Since we have dealt with the locals and params cases above, if we
322 are CHECKING_WRITE, this cannot be a pure or constant
326 local
->pure_const_state
= IPA_NEITHER
;
328 fprintf (dump_file
, " static/global memory write is not const/pure\n");
332 if (DECL_EXTERNAL (t
) || TREE_PUBLIC (t
))
334 /* Readonly reads are safe. */
335 if (TREE_READONLY (t
) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t
)))
336 return; /* Read of a constant, do not change the function state. */
340 fprintf (dump_file
, " global memory read is not const\n");
341 /* Just a regular read. */
342 if (local
->pure_const_state
== IPA_CONST
)
343 local
->pure_const_state
= IPA_PURE
;
348 /* Compilation level statics can be read if they are readonly
350 if (TREE_READONLY (t
))
354 fprintf (dump_file
, " static memory read is not const\n");
355 /* Just a regular read. */
356 if (local
->pure_const_state
== IPA_CONST
)
357 local
->pure_const_state
= IPA_PURE
;
362 /* Check to see if the use (or definition when CHECKING_WRITE is true)
363 variable T is legal in a function that is either pure or const. */
366 check_op (funct_state local
, tree t
, bool checking_write
)
368 t
= get_base_address (t
);
369 if (t
&& TREE_THIS_VOLATILE (t
))
371 local
->pure_const_state
= IPA_NEITHER
;
373 fprintf (dump_file
, " Volatile indirect ref is not const/pure\n");
377 && (INDIRECT_REF_P (t
) || TREE_CODE (t
) == MEM_REF
)
378 && TREE_CODE (TREE_OPERAND (t
, 0)) == SSA_NAME
379 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t
, 0)))
382 fprintf (dump_file
, " Indirect ref to local memory is OK\n");
385 else if (checking_write
)
387 local
->pure_const_state
= IPA_NEITHER
;
389 fprintf (dump_file
, " Indirect ref write is not const/pure\n");
395 fprintf (dump_file
, " Indirect ref read is not const\n");
396 if (local
->pure_const_state
== IPA_CONST
)
397 local
->pure_const_state
= IPA_PURE
;
401 /* compute state based on ECF FLAGS and store to STATE and LOOPING. */
404 state_from_flags (enum pure_const_state_e
*state
, bool *looping
,
405 int flags
, bool cannot_lead_to_return
)
408 if (flags
& ECF_LOOPING_CONST_OR_PURE
)
411 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
412 fprintf (dump_file
, " looping");
414 if (flags
& ECF_CONST
)
417 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
418 fprintf (dump_file
, " const\n");
420 else if (flags
& ECF_PURE
)
423 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
424 fprintf (dump_file
, " pure\n");
426 else if (cannot_lead_to_return
)
430 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
431 fprintf (dump_file
, " ignoring side effects->pure looping\n");
435 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
436 fprintf (dump_file
, " neither\n");
437 *state
= IPA_NEITHER
;
442 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
443 into STATE and LOOPING better of the two variants.
444 Be sure to merge looping correctly. IPA_NEITHER functions
445 have looping 0 even if they don't have to return. */
448 better_state (enum pure_const_state_e
*state
, bool *looping
,
449 enum pure_const_state_e state2
, bool looping2
)
453 if (*state
== IPA_NEITHER
)
456 *looping
= MIN (*looping
, looping2
);
459 else if (state2
!= IPA_NEITHER
)
460 *looping
= MIN (*looping
, looping2
);
463 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
464 into STATE and LOOPING worse of the two variants. */
467 worse_state (enum pure_const_state_e
*state
, bool *looping
,
468 enum pure_const_state_e state2
, bool looping2
)
470 *state
= MAX (*state
, state2
);
471 *looping
= MAX (*looping
, looping2
);
474 /* Recognize special cases of builtins that are by themselves not pure or const
475 but function using them is. */
477 special_builtin_state (enum pure_const_state_e
*state
, bool *looping
,
480 if (DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
481 switch (DECL_FUNCTION_CODE (callee
))
483 case BUILT_IN_RETURN
:
484 case BUILT_IN_UNREACHABLE
:
485 case BUILT_IN_ALLOCA
:
486 case BUILT_IN_ALLOCA_WITH_ALIGN
:
487 case BUILT_IN_STACK_SAVE
:
488 case BUILT_IN_STACK_RESTORE
:
489 case BUILT_IN_EH_POINTER
:
490 case BUILT_IN_EH_FILTER
:
491 case BUILT_IN_UNWIND_RESUME
:
492 case BUILT_IN_CXA_END_CLEANUP
:
493 case BUILT_IN_EH_COPY_VALUES
:
494 case BUILT_IN_FRAME_ADDRESS
:
496 case BUILT_IN_APPLY_ARGS
:
500 case BUILT_IN_PREFETCH
:
510 /* Check the parameters of a function call to CALL_EXPR to see if
511 there are any references in the parameters that are not allowed for
512 pure or const functions. Also check to see if this is either an
513 indirect call, a call outside the compilation unit, or has special
514 attributes that may also effect the purity. The CALL_EXPR node for
515 the entire call expression. */
518 check_call (funct_state local
, gcall
*call
, bool ipa
)
520 int flags
= gimple_call_flags (call
);
521 tree callee_t
= gimple_call_fndecl (call
);
522 bool possibly_throws
= stmt_could_throw_p (call
);
523 bool possibly_throws_externally
= (possibly_throws
524 && stmt_can_throw_external (call
));
529 for (i
= 0; i
< gimple_num_ops (call
); i
++)
530 if (gimple_op (call
, i
)
531 && tree_could_throw_p (gimple_op (call
, i
)))
533 if (possibly_throws
&& cfun
->can_throw_non_call_exceptions
)
536 fprintf (dump_file
, " operand can throw; looping\n");
537 local
->looping
= true;
539 if (possibly_throws_externally
)
542 fprintf (dump_file
, " operand can throw externally\n");
543 local
->can_throw
= true;
548 /* The const and pure flags are set by a variety of places in the
549 compiler (including here). If someone has already set the flags
550 for the callee, (such as for some of the builtins) we will use
551 them, otherwise we will compute our own information.
553 Const and pure functions have less clobber effects than other
554 functions so we process these first. Otherwise if it is a call
555 outside the compilation unit or an indirect call we punt. This
556 leaves local calls which will be processed by following the call
560 enum pure_const_state_e call_state
;
563 if (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
)
564 && !nonfreeing_call_p (call
))
565 local
->can_free
= true;
567 if (special_builtin_state (&call_state
, &call_looping
, callee_t
))
569 worse_state (&local
->pure_const_state
, &local
->looping
,
570 call_state
, call_looping
);
573 /* When bad things happen to bad functions, they cannot be const
575 if (setjmp_call_p (callee_t
))
578 fprintf (dump_file
, " setjmp is not const/pure\n");
579 local
->looping
= true;
580 local
->pure_const_state
= IPA_NEITHER
;
583 if (DECL_BUILT_IN_CLASS (callee_t
) == BUILT_IN_NORMAL
)
584 switch (DECL_FUNCTION_CODE (callee_t
))
586 case BUILT_IN_LONGJMP
:
587 case BUILT_IN_NONLOCAL_GOTO
:
589 fprintf (dump_file
, " longjmp and nonlocal goto is not const/pure\n");
590 local
->pure_const_state
= IPA_NEITHER
;
591 local
->looping
= true;
597 else if (gimple_call_internal_p (call
) && !nonfreeing_call_p (call
))
598 local
->can_free
= true;
600 /* When not in IPA mode, we can still handle self recursion. */
602 && recursive_call_p (current_function_decl
, callee_t
))
605 fprintf (dump_file
, " Recursive call can loop.\n");
606 local
->looping
= true;
608 /* Either callee is unknown or we are doing local analysis.
609 Look to see if there are any bits available for the callee (such as by
610 declaration or because it is builtin) and process solely on the basis of
614 enum pure_const_state_e call_state
;
616 if (possibly_throws
&& cfun
->can_throw_non_call_exceptions
)
619 fprintf (dump_file
, " can throw; looping\n");
620 local
->looping
= true;
622 if (possibly_throws_externally
)
626 fprintf (dump_file
, " can throw externally to lp %i\n",
627 lookup_stmt_eh_lp (call
));
629 fprintf (dump_file
, " callee:%s\n",
630 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t
)));
632 local
->can_throw
= true;
634 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
635 fprintf (dump_file
, " checking flags for call:");
636 state_from_flags (&call_state
, &call_looping
, flags
,
637 ((flags
& (ECF_NORETURN
| ECF_NOTHROW
))
638 == (ECF_NORETURN
| ECF_NOTHROW
))
639 || (!flag_exceptions
&& (flags
& ECF_NORETURN
)));
640 worse_state (&local
->pure_const_state
, &local
->looping
,
641 call_state
, call_looping
);
643 /* Direct functions calls are handled by IPA propagation. */
646 /* Wrapper around check_decl for loads in local more. */
649 check_load (gimple
, tree op
, tree
, void *data
)
652 check_decl ((funct_state
)data
, op
, false, false);
654 check_op ((funct_state
)data
, op
, false);
658 /* Wrapper around check_decl for stores in local more. */
661 check_store (gimple
, tree op
, tree
, void *data
)
664 check_decl ((funct_state
)data
, op
, true, false);
666 check_op ((funct_state
)data
, op
, true);
670 /* Wrapper around check_decl for loads in ipa mode. */
673 check_ipa_load (gimple
, tree op
, tree
, void *data
)
676 check_decl ((funct_state
)data
, op
, false, true);
678 check_op ((funct_state
)data
, op
, false);
682 /* Wrapper around check_decl for stores in ipa mode. */
685 check_ipa_store (gimple
, tree op
, tree
, void *data
)
688 check_decl ((funct_state
)data
, op
, true, true);
690 check_op ((funct_state
)data
, op
, true);
694 /* Look into pointer pointed to by GSIP and figure out what interesting side
697 check_stmt (gimple_stmt_iterator
*gsip
, funct_state local
, bool ipa
)
699 gimple stmt
= gsi_stmt (*gsip
);
701 if (is_gimple_debug (stmt
))
704 /* Do consider clobber as side effects before IPA, so we rather inline
705 C++ destructors and keep clobber semantics than eliminate them.
707 TODO: We may get smarter during early optimizations on these and let
708 functions containing only clobbers to be optimized more. This is a common
709 case of C++ destructors. */
711 if ((ipa
|| cfun
->after_inlining
) && gimple_clobber_p (stmt
))
716 fprintf (dump_file
, " scanning: ");
717 print_gimple_stmt (dump_file
, stmt
, 0, 0);
720 if (gimple_has_volatile_ops (stmt
)
721 && !gimple_clobber_p (stmt
))
723 local
->pure_const_state
= IPA_NEITHER
;
725 fprintf (dump_file
, " Volatile stmt is not const/pure\n");
728 /* Look for loads and stores. */
729 walk_stmt_load_store_ops (stmt
, local
,
730 ipa
? check_ipa_load
: check_load
,
731 ipa
? check_ipa_store
: check_store
);
733 if (gimple_code (stmt
) != GIMPLE_CALL
734 && stmt_could_throw_p (stmt
))
736 if (cfun
->can_throw_non_call_exceptions
)
739 fprintf (dump_file
, " can throw; looping\n");
740 local
->looping
= true;
742 if (stmt_can_throw_external (stmt
))
745 fprintf (dump_file
, " can throw externally\n");
746 local
->can_throw
= true;
750 fprintf (dump_file
, " can throw\n");
752 switch (gimple_code (stmt
))
755 check_call (local
, as_a
<gcall
*> (stmt
), ipa
);
758 if (DECL_NONLOCAL (gimple_label_label (as_a
<glabel
*> (stmt
))))
759 /* Target of long jump. */
762 fprintf (dump_file
, " nonlocal label is not const/pure\n");
763 local
->pure_const_state
= IPA_NEITHER
;
767 if (gimple_asm_clobbers_memory_p (as_a
<gasm
*> (stmt
)))
770 fprintf (dump_file
, " memory asm clobber is not const/pure\n");
771 /* Abandon all hope, ye who enter here. */
772 local
->pure_const_state
= IPA_NEITHER
;
773 local
->can_free
= true;
775 if (gimple_asm_volatile_p (as_a
<gasm
*> (stmt
)))
778 fprintf (dump_file
, " volatile is not const/pure\n");
779 /* Abandon all hope, ye who enter here. */
780 local
->pure_const_state
= IPA_NEITHER
;
781 local
->looping
= true;
782 local
->can_free
= true;
791 /* This is the main routine for finding the reference patterns for
792 global variables within a function FN. */
795 analyze_function (struct cgraph_node
*fn
, bool ipa
)
797 tree decl
= fn
->decl
;
799 basic_block this_block
;
801 l
= XCNEW (struct funct_state_d
);
802 l
->pure_const_state
= IPA_CONST
;
803 l
->state_previously_known
= IPA_NEITHER
;
804 l
->looping_previously_known
= true;
806 l
->can_throw
= false;
808 state_from_flags (&l
->state_previously_known
, &l
->looping_previously_known
,
809 flags_from_decl_or_type (fn
->decl
),
810 fn
->cannot_return_p ());
812 if (fn
->thunk
.thunk_p
|| fn
->alias
)
814 /* Thunk gets propagated through, so nothing interesting happens. */
816 if (fn
->thunk
.thunk_p
&& fn
->thunk
.virtual_offset_p
)
817 l
->pure_const_state
= IPA_NEITHER
;
823 fprintf (dump_file
, "\n\n local analysis of %s\n ",
827 push_cfun (DECL_STRUCT_FUNCTION (decl
));
829 FOR_EACH_BB_FN (this_block
, cfun
)
831 gimple_stmt_iterator gsi
;
832 struct walk_stmt_info wi
;
834 memset (&wi
, 0, sizeof (wi
));
835 for (gsi
= gsi_start_bb (this_block
);
839 check_stmt (&gsi
, l
, ipa
);
840 if (l
->pure_const_state
== IPA_NEITHER
849 if (l
->pure_const_state
!= IPA_NEITHER
)
851 /* Const functions cannot have back edges (an
852 indication of possible infinite loop side
854 if (mark_dfs_back_edges ())
856 /* Preheaders are needed for SCEV to work.
857 Simple latches and recorded exits improve chances that loop will
858 proved to be finite in testcases such as in loop-15.c
860 loop_optimizer_init (LOOPS_HAVE_PREHEADERS
861 | LOOPS_HAVE_SIMPLE_LATCHES
862 | LOOPS_HAVE_RECORDED_EXITS
);
863 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
864 flow_loops_dump (dump_file
, NULL
, 0);
865 if (mark_irreducible_loops ())
868 fprintf (dump_file
, " has irreducible loops\n");
875 FOR_EACH_LOOP (loop
, 0)
876 if (!finite_loop_p (loop
))
879 fprintf (dump_file
, " can not prove finiteness of "
880 "loop %i\n", loop
->num
);
886 loop_optimizer_finalize ();
890 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
891 fprintf (dump_file
, " checking previously known:");
893 better_state (&l
->pure_const_state
, &l
->looping
,
894 l
->state_previously_known
,
895 l
->looping_previously_known
);
896 if (TREE_NOTHROW (decl
))
897 l
->can_throw
= false;
903 fprintf (dump_file
, "Function is locally looping.\n");
905 fprintf (dump_file
, "Function is locally throwing.\n");
906 if (l
->pure_const_state
== IPA_CONST
)
907 fprintf (dump_file
, "Function is locally const.\n");
908 if (l
->pure_const_state
== IPA_PURE
)
909 fprintf (dump_file
, "Function is locally pure.\n");
911 fprintf (dump_file
, "Function can locally free.\n");
916 /* Called when new function is inserted to callgraph late. */
918 add_new_function (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
920 if (node
->get_availability () < AVAIL_INTERPOSABLE
)
922 /* There are some shared nodes, in particular the initializers on
923 static declarations. We do not need to scan them more than once
924 since all we would be interested in are the addressof
926 if (node
->get_availability () > AVAIL_INTERPOSABLE
927 && opt_for_fn (node
->decl
, flag_ipa_pure_const
))
928 set_function_state (node
, analyze_function (node
, true));
931 /* Called when new clone is inserted to callgraph late. */
934 duplicate_node_data (struct cgraph_node
*src
, struct cgraph_node
*dst
,
935 void *data ATTRIBUTE_UNUSED
)
937 if (has_function_state (src
))
939 funct_state l
= XNEW (struct funct_state_d
);
940 gcc_assert (!has_function_state (dst
));
941 memcpy (l
, get_function_state (src
), sizeof (*l
));
942 set_function_state (dst
, l
);
946 /* Called when new clone is inserted to callgraph late. */
949 remove_node_data (struct cgraph_node
*node
, void *data ATTRIBUTE_UNUSED
)
951 if (has_function_state (node
))
953 funct_state l
= get_function_state (node
);
954 if (l
!= &varying_state
)
956 set_function_state (node
, NULL
);
962 pass_ipa_pure_const::
963 register_hooks (void)
970 node_removal_hook_holder
=
971 symtab
->add_cgraph_removal_hook (&remove_node_data
, NULL
);
972 node_duplication_hook_holder
=
973 symtab
->add_cgraph_duplication_hook (&duplicate_node_data
, NULL
);
974 function_insertion_hook_holder
=
975 symtab
->add_cgraph_insertion_hook (&add_new_function
, NULL
);
979 /* Analyze each function in the cgraph to see if it is locally PURE or
983 pure_const_generate_summary (void)
985 struct cgraph_node
*node
;
987 pass_ipa_pure_const
*pass
= static_cast <pass_ipa_pure_const
*> (current_pass
);
988 pass
->register_hooks ();
990 /* Process all of the functions.
992 We process AVAIL_INTERPOSABLE functions. We can not use the results
993 by default, but the info can be used at LTO with -fwhole-program or
994 when function got cloned and the clone is AVAILABLE. */
996 FOR_EACH_DEFINED_FUNCTION (node
)
997 if (node
->get_availability () >= AVAIL_INTERPOSABLE
998 && opt_for_fn (node
->decl
, flag_ipa_pure_const
))
999 set_function_state (node
, analyze_function (node
, true));
1003 /* Serialize the ipa info for lto. */
1006 pure_const_write_summary (void)
1008 struct cgraph_node
*node
;
1009 struct lto_simple_output_block
*ob
1010 = lto_create_simple_output_block (LTO_section_ipa_pure_const
);
1011 unsigned int count
= 0;
1012 lto_symtab_encoder_iterator lsei
;
1013 lto_symtab_encoder_t encoder
;
1015 encoder
= lto_get_out_decl_state ()->symtab_node_encoder
;
1017 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
1018 lsei_next_function_in_partition (&lsei
))
1020 node
= lsei_cgraph_node (lsei
);
1021 if (node
->definition
&& has_function_state (node
))
1025 streamer_write_uhwi_stream (ob
->main_stream
, count
);
1027 /* Process all of the functions. */
1028 for (lsei
= lsei_start_function_in_partition (encoder
); !lsei_end_p (lsei
);
1029 lsei_next_function_in_partition (&lsei
))
1031 node
= lsei_cgraph_node (lsei
);
1032 if (node
->definition
&& has_function_state (node
))
1034 struct bitpack_d bp
;
1037 lto_symtab_encoder_t encoder
;
1039 fs
= get_function_state (node
);
1041 encoder
= ob
->decl_state
->symtab_node_encoder
;
1042 node_ref
= lto_symtab_encoder_encode (encoder
, node
);
1043 streamer_write_uhwi_stream (ob
->main_stream
, node_ref
);
1045 /* Note that flags will need to be read in the opposite
1046 order as we are pushing the bitflags into FLAGS. */
1047 bp
= bitpack_create (ob
->main_stream
);
1048 bp_pack_value (&bp
, fs
->pure_const_state
, 2);
1049 bp_pack_value (&bp
, fs
->state_previously_known
, 2);
1050 bp_pack_value (&bp
, fs
->looping_previously_known
, 1);
1051 bp_pack_value (&bp
, fs
->looping
, 1);
1052 bp_pack_value (&bp
, fs
->can_throw
, 1);
1053 bp_pack_value (&bp
, fs
->can_free
, 1);
1054 streamer_write_bitpack (&bp
);
1058 lto_destroy_simple_output_block (ob
);
1062 /* Deserialize the ipa info for lto. */
1065 pure_const_read_summary (void)
1067 struct lto_file_decl_data
**file_data_vec
= lto_get_file_decl_data ();
1068 struct lto_file_decl_data
*file_data
;
1071 pass_ipa_pure_const
*pass
= static_cast <pass_ipa_pure_const
*> (current_pass
);
1072 pass
->register_hooks ();
1074 while ((file_data
= file_data_vec
[j
++]))
1078 struct lto_input_block
*ib
1079 = lto_create_simple_input_block (file_data
,
1080 LTO_section_ipa_pure_const
,
1085 unsigned int count
= streamer_read_uhwi (ib
);
1087 for (i
= 0; i
< count
; i
++)
1090 struct cgraph_node
*node
;
1091 struct bitpack_d bp
;
1093 lto_symtab_encoder_t encoder
;
1095 fs
= XCNEW (struct funct_state_d
);
1096 index
= streamer_read_uhwi (ib
);
1097 encoder
= file_data
->symtab_node_encoder
;
1098 node
= dyn_cast
<cgraph_node
*> (lto_symtab_encoder_deref (encoder
,
1100 set_function_state (node
, fs
);
1102 /* Note that the flags must be read in the opposite
1103 order in which they were written (the bitflags were
1104 pushed into FLAGS). */
1105 bp
= streamer_read_bitpack (ib
);
1106 fs
->pure_const_state
1107 = (enum pure_const_state_e
) bp_unpack_value (&bp
, 2);
1108 fs
->state_previously_known
1109 = (enum pure_const_state_e
) bp_unpack_value (&bp
, 2);
1110 fs
->looping_previously_known
= bp_unpack_value (&bp
, 1);
1111 fs
->looping
= bp_unpack_value (&bp
, 1);
1112 fs
->can_throw
= bp_unpack_value (&bp
, 1);
1113 fs
->can_free
= bp_unpack_value (&bp
, 1);
1116 int flags
= flags_from_decl_or_type (node
->decl
);
1117 fprintf (dump_file
, "Read info for %s/%i ",
1120 if (flags
& ECF_CONST
)
1121 fprintf (dump_file
, " const");
1122 if (flags
& ECF_PURE
)
1123 fprintf (dump_file
, " pure");
1124 if (flags
& ECF_NOTHROW
)
1125 fprintf (dump_file
, " nothrow");
1126 fprintf (dump_file
, "\n pure const state: %s\n",
1127 pure_const_names
[fs
->pure_const_state
]);
1128 fprintf (dump_file
, " previously known state: %s\n",
1129 pure_const_names
[fs
->looping_previously_known
]);
1131 fprintf (dump_file
," function is locally looping\n");
1132 if (fs
->looping_previously_known
)
1133 fprintf (dump_file
," function is previously known looping\n");
1135 fprintf (dump_file
," function is locally throwing\n");
1137 fprintf (dump_file
," function can locally free\n");
1141 lto_destroy_simple_input_block (file_data
,
1142 LTO_section_ipa_pure_const
,
1150 ignore_edge (struct cgraph_edge
*e
)
1152 return (!e
->can_throw_external
);
1155 /* Return true if NODE is self recursive function.
1156 Indirectly recursive functions appears as non-trivial strongly
1157 connected components, so we need to care about self recursion
1161 self_recursive_p (struct cgraph_node
*node
)
1163 struct cgraph_edge
*e
;
1164 for (e
= node
->callees
; e
; e
= e
->next_callee
)
1165 if (e
->callee
->function_symbol () == node
)
1170 /* Return true if N is cdtor that is not const or pure. In this case we may
1171 need to remove unreachable function if it is marked const/pure. */
1174 cdtor_p (cgraph_node
*n
, void *)
1176 if (DECL_STATIC_CONSTRUCTOR (n
->decl
) || DECL_STATIC_DESTRUCTOR (n
->decl
))
1177 return !TREE_READONLY (n
->decl
) && !DECL_PURE_P (n
->decl
);
1181 /* Produce transitive closure over the callgraph and compute pure/const
1185 propagate_pure_const (void)
1187 struct cgraph_node
*node
;
1188 struct cgraph_node
*w
;
1189 struct cgraph_node
**order
=
1190 XCNEWVEC (struct cgraph_node
*, symtab
->cgraph_count
);
1193 struct ipa_dfs_info
* w_info
;
1194 bool remove_p
= false;
1196 order_pos
= ipa_reduced_postorder (order
, true, false, NULL
);
1199 cgraph_node::dump_cgraph (dump_file
);
1200 ipa_print_order (dump_file
, "reduced", order
, order_pos
);
1203 /* Propagate the local information through the call graph to produce
1204 the global information. All the nodes within a cycle will have
1205 the same info so we collapse cycles first. Then we can do the
1206 propagation in one pass from the leaves to the roots. */
1207 for (i
= 0; i
< order_pos
; i
++ )
1209 enum pure_const_state_e pure_const_state
= IPA_CONST
;
1210 bool looping
= false;
1217 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1218 fprintf (dump_file
, "Starting cycle\n");
1220 /* Find the worst state for any node in the cycle. */
1222 while (w
&& pure_const_state
!= IPA_NEITHER
)
1224 struct cgraph_edge
*e
;
1225 struct cgraph_edge
*ie
;
1227 struct ipa_ref
*ref
= NULL
;
1229 funct_state w_l
= get_function_state (w
);
1230 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1231 fprintf (dump_file
, " Visiting %s/%i state:%s looping %i\n",
1234 pure_const_names
[w_l
->pure_const_state
],
1237 /* First merge in function body properties. */
1238 worse_state (&pure_const_state
, &looping
,
1239 w_l
->pure_const_state
, w_l
->looping
);
1240 if (pure_const_state
== IPA_NEITHER
)
1243 /* For overwritable nodes we can not assume anything. */
1244 if (w
->get_availability () == AVAIL_INTERPOSABLE
)
1246 worse_state (&pure_const_state
, &looping
,
1247 w_l
->state_previously_known
,
1248 w_l
->looping_previously_known
);
1249 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1252 " Overwritable. state %s looping %i\n",
1253 pure_const_names
[w_l
->state_previously_known
],
1254 w_l
->looping_previously_known
);
1261 /* We consider recursive cycles as possibly infinite.
1262 This might be relaxed since infinite recursion leads to stack
1267 /* Now walk the edges and merge in callee properties. */
1268 for (e
= w
->callees
; e
; e
= e
->next_callee
)
1270 enum availability avail
;
1271 struct cgraph_node
*y
= e
->callee
->
1272 function_or_virtual_thunk_symbol (&avail
);
1273 enum pure_const_state_e edge_state
= IPA_CONST
;
1274 bool edge_looping
= false;
1276 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1283 if (avail
> AVAIL_INTERPOSABLE
)
1285 funct_state y_l
= get_function_state (y
);
1286 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1289 " state:%s looping:%i\n",
1290 pure_const_names
[y_l
->pure_const_state
],
1293 if (y_l
->pure_const_state
> IPA_PURE
1294 && e
->cannot_lead_to_return_p ())
1296 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1298 " Ignoring side effects"
1299 " -> pure, looping\n");
1300 edge_state
= IPA_PURE
;
1301 edge_looping
= true;
1305 edge_state
= y_l
->pure_const_state
;
1306 edge_looping
= y_l
->looping
;
1309 else if (special_builtin_state (&edge_state
, &edge_looping
,
1313 state_from_flags (&edge_state
, &edge_looping
,
1314 flags_from_decl_or_type (y
->decl
),
1315 e
->cannot_lead_to_return_p ());
1317 /* Merge the results with what we already know. */
1318 better_state (&edge_state
, &edge_looping
,
1319 w_l
->state_previously_known
,
1320 w_l
->looping_previously_known
);
1321 worse_state (&pure_const_state
, &looping
,
1322 edge_state
, edge_looping
);
1323 if (pure_const_state
== IPA_NEITHER
)
1326 if (pure_const_state
== IPA_NEITHER
)
1329 /* Now process the indirect call. */
1330 for (ie
= w
->indirect_calls
; ie
; ie
= ie
->next_callee
)
1332 enum pure_const_state_e edge_state
= IPA_CONST
;
1333 bool edge_looping
= false;
1335 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1336 fprintf (dump_file
, " Indirect call");
1337 state_from_flags (&edge_state
, &edge_looping
,
1338 ie
->indirect_info
->ecf_flags
,
1339 ie
->cannot_lead_to_return_p ());
1340 /* Merge the results with what we already know. */
1341 better_state (&edge_state
, &edge_looping
,
1342 w_l
->state_previously_known
,
1343 w_l
->looping_previously_known
);
1344 worse_state (&pure_const_state
, &looping
,
1345 edge_state
, edge_looping
);
1346 if (pure_const_state
== IPA_NEITHER
)
1349 if (pure_const_state
== IPA_NEITHER
)
1352 /* And finally all loads and stores. */
1353 for (i
= 0; w
->iterate_reference (i
, ref
); i
++)
1355 enum pure_const_state_e ref_state
= IPA_CONST
;
1356 bool ref_looping
= false;
1360 /* readonly reads are safe. */
1361 if (TREE_READONLY (ref
->referred
->decl
))
1363 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1364 fprintf (dump_file
, " nonreadonly global var read\n");
1365 ref_state
= IPA_PURE
;
1368 if (ref
->cannot_lead_to_return ())
1370 ref_state
= IPA_NEITHER
;
1371 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1372 fprintf (dump_file
, " global var write\n");
1380 better_state (&ref_state
, &ref_looping
,
1381 w_l
->state_previously_known
,
1382 w_l
->looping_previously_known
);
1383 worse_state (&pure_const_state
, &looping
,
1384 ref_state
, ref_looping
);
1385 if (pure_const_state
== IPA_NEITHER
)
1388 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1389 w
= w_info
->next_cycle
;
1391 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1392 fprintf (dump_file
, "Result %s looping %i\n",
1393 pure_const_names
[pure_const_state
],
1396 /* Find the worst state of can_free for any node in the cycle. */
1397 bool can_free
= false;
1399 while (w
&& !can_free
)
1401 struct cgraph_edge
*e
;
1402 funct_state w_l
= get_function_state (w
);
1405 || w
->get_availability () == AVAIL_INTERPOSABLE
1406 || w
->indirect_calls
)
1409 for (e
= w
->callees
; e
&& !can_free
; e
= e
->next_callee
)
1411 enum availability avail
;
1412 struct cgraph_node
*y
= e
->callee
->
1413 function_or_virtual_thunk_symbol (&avail
);
1415 if (avail
> AVAIL_INTERPOSABLE
)
1416 can_free
= get_function_state (y
)->can_free
;
1420 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1421 w
= w_info
->next_cycle
;
1424 /* Copy back the region's pure_const_state which is shared by
1425 all nodes in the region. */
1429 funct_state w_l
= get_function_state (w
);
1430 enum pure_const_state_e this_state
= pure_const_state
;
1431 bool this_looping
= looping
;
1433 w_l
->can_free
= can_free
;
1434 w
->nonfreeing_fn
= !can_free
;
1435 if (!can_free
&& dump_file
)
1436 fprintf (dump_file
, "Function found not to call free: %s\n",
1439 if (w_l
->state_previously_known
!= IPA_NEITHER
1440 && this_state
> w_l
->state_previously_known
)
1442 this_state
= w_l
->state_previously_known
;
1443 this_looping
|= w_l
->looping_previously_known
;
1445 if (!this_looping
&& self_recursive_p (w
))
1446 this_looping
= true;
1447 if (!w_l
->looping_previously_known
)
1448 this_looping
= false;
1450 /* All nodes within a cycle share the same info. */
1451 w_l
->pure_const_state
= this_state
;
1452 w_l
->looping
= this_looping
;
1454 /* Inline clones share declaration with their offline copies;
1455 do not modify their declarations since the offline copy may
1457 if (!w
->global
.inlined_to
)
1461 if (!TREE_READONLY (w
->decl
))
1463 warn_function_const (w
->decl
, !this_looping
);
1465 fprintf (dump_file
, "Function found to be %sconst: %s\n",
1466 this_looping
? "looping " : "",
1469 remove_p
|= w
->call_for_symbol_and_aliases (cdtor_p
,
1471 w
->set_const_flag (true, this_looping
);
1475 if (!DECL_PURE_P (w
->decl
))
1477 warn_function_pure (w
->decl
, !this_looping
);
1479 fprintf (dump_file
, "Function found to be %spure: %s\n",
1480 this_looping
? "looping " : "",
1483 remove_p
|= w
->call_for_symbol_and_aliases (cdtor_p
,
1485 w
->set_pure_flag (true, this_looping
);
1491 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1492 w
= w_info
->next_cycle
;
1496 ipa_free_postorder_info ();
1501 /* Produce transitive closure over the callgraph and compute nothrow
1505 propagate_nothrow (void)
1507 struct cgraph_node
*node
;
1508 struct cgraph_node
*w
;
1509 struct cgraph_node
**order
=
1510 XCNEWVEC (struct cgraph_node
*, symtab
->cgraph_count
);
1513 struct ipa_dfs_info
* w_info
;
1515 order_pos
= ipa_reduced_postorder (order
, true, false, ignore_edge
);
1518 cgraph_node::dump_cgraph (dump_file
);
1519 ipa_print_order (dump_file
, "reduced for nothrow", order
, order_pos
);
1522 /* Propagate the local information through the call graph to produce
1523 the global information. All the nodes within a cycle will have
1524 the same info so we collapse cycles first. Then we can do the
1525 propagation in one pass from the leaves to the roots. */
1526 for (i
= 0; i
< order_pos
; i
++ )
1528 bool can_throw
= false;
1534 /* Find the worst state for any node in the cycle. */
1536 while (w
&& !can_throw
)
1538 struct cgraph_edge
*e
, *ie
;
1539 funct_state w_l
= get_function_state (w
);
1542 || w
->get_availability () == AVAIL_INTERPOSABLE
)
1545 for (e
= w
->callees
; e
&& !can_throw
; e
= e
->next_callee
)
1547 enum availability avail
;
1548 struct cgraph_node
*y
= e
->callee
->
1549 function_or_virtual_thunk_symbol (&avail
);
1551 if (avail
> AVAIL_INTERPOSABLE
)
1553 funct_state y_l
= get_function_state (y
);
1555 if (y_l
->can_throw
&& !TREE_NOTHROW (w
->decl
)
1556 && e
->can_throw_external
)
1559 else if (e
->can_throw_external
&& !TREE_NOTHROW (y
->decl
))
1562 for (ie
= w
->indirect_calls
; ie
&& !can_throw
; ie
= ie
->next_callee
)
1563 if (ie
->can_throw_external
)
1565 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1566 w
= w_info
->next_cycle
;
1569 /* Copy back the region's pure_const_state which is shared by
1570 all nodes in the region. */
1574 funct_state w_l
= get_function_state (w
);
1575 if (!can_throw
&& !TREE_NOTHROW (w
->decl
))
1577 /* Inline clones share declaration with their offline copies;
1578 do not modify their declarations since the offline copy may
1580 if (!w
->global
.inlined_to
)
1582 w
->set_nothrow_flag (true);
1584 fprintf (dump_file
, "Function found to be nothrow: %s\n",
1588 else if (can_throw
&& !TREE_NOTHROW (w
->decl
))
1589 w_l
->can_throw
= true;
1590 w_info
= (struct ipa_dfs_info
*) w
->aux
;
1591 w
= w_info
->next_cycle
;
1595 ipa_free_postorder_info ();
1600 /* Produce the global information by preforming a transitive closure
1601 on the local information that was produced by generate_summary. */
1604 pass_ipa_pure_const::
1605 execute (function
*)
1607 struct cgraph_node
*node
;
1610 symtab
->remove_cgraph_insertion_hook (function_insertion_hook_holder
);
1611 symtab
->remove_cgraph_duplication_hook (node_duplication_hook_holder
);
1612 symtab
->remove_cgraph_removal_hook (node_removal_hook_holder
);
1614 /* Nothrow makes more function to not lead to return and improve
1616 propagate_nothrow ();
1617 remove_p
= propagate_pure_const ();
1620 FOR_EACH_FUNCTION (node
)
1621 if (has_function_state (node
))
1622 free (get_function_state (node
));
1623 funct_state_vec
.release ();
1624 return remove_p
? TODO_remove_functions
: 0;
1628 gate_pure_const (void)
1630 return flag_ipa_pure_const
|| in_lto_p
;
1633 pass_ipa_pure_const::pass_ipa_pure_const(gcc::context
*ctxt
)
1634 : ipa_opt_pass_d(pass_data_ipa_pure_const
, ctxt
,
1635 pure_const_generate_summary
, /* generate_summary */
1636 pure_const_write_summary
, /* write_summary */
1637 pure_const_read_summary
, /* read_summary */
1638 NULL
, /* write_optimization_summary */
1639 NULL
, /* read_optimization_summary */
1640 NULL
, /* stmt_fixup */
1641 0, /* function_transform_todo_flags_start */
1642 NULL
, /* function_transform */
1643 NULL
), /* variable_transform */
1645 function_insertion_hook_holder(NULL
),
1646 node_duplication_hook_holder(NULL
),
1647 node_removal_hook_holder(NULL
)
1652 make_pass_ipa_pure_const (gcc::context
*ctxt
)
1654 return new pass_ipa_pure_const (ctxt
);
1657 /* Return true if function should be skipped for local pure const analysis. */
1660 skip_function_for_local_pure_const (struct cgraph_node
*node
)
1662 /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1663 we must not promote functions that are called by already processed functions. */
1665 if (function_called_by_processed_nodes_p ())
1668 fprintf (dump_file
, "Function called in recursive cycle; ignoring\n");
1671 if (node
->get_availability () <= AVAIL_INTERPOSABLE
)
1674 fprintf (dump_file
, "Function is not available or overwritable; not analyzing.\n");
1680 /* Simple local pass for pure const discovery reusing the analysis from
1681 ipa_pure_const. This pass is effective when executed together with
1682 other optimization passes in early optimization pass queue. */
1686 const pass_data pass_data_local_pure_const
=
1688 GIMPLE_PASS
, /* type */
1689 "local-pure-const", /* name */
1690 OPTGROUP_NONE
, /* optinfo_flags */
1691 TV_IPA_PURE_CONST
, /* tv_id */
1692 0, /* properties_required */
1693 0, /* properties_provided */
1694 0, /* properties_destroyed */
1695 0, /* todo_flags_start */
1696 0, /* todo_flags_finish */
1699 class pass_local_pure_const
: public gimple_opt_pass
1702 pass_local_pure_const (gcc::context
*ctxt
)
1703 : gimple_opt_pass (pass_data_local_pure_const
, ctxt
)
1706 /* opt_pass methods: */
1707 opt_pass
* clone () { return new pass_local_pure_const (m_ctxt
); }
1708 virtual bool gate (function
*) { return gate_pure_const (); }
1709 virtual unsigned int execute (function
*);
1711 }; // class pass_local_pure_const
1714 pass_local_pure_const::execute (function
*fun
)
1716 bool changed
= false;
1719 struct cgraph_node
*node
;
1721 node
= cgraph_node::get (current_function_decl
);
1722 skip
= skip_function_for_local_pure_const (node
);
1723 if (!warn_suggest_attribute_const
1724 && !warn_suggest_attribute_pure
1728 l
= analyze_function (node
, false);
1730 /* Do NORETURN discovery. */
1731 if (!skip
&& !TREE_THIS_VOLATILE (current_function_decl
)
1732 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) == 0)
1734 warn_function_noreturn (fun
->decl
);
1736 fprintf (dump_file
, "Function found to be noreturn: %s\n",
1737 current_function_name ());
1739 /* Update declaration and reduce profile to executed once. */
1740 TREE_THIS_VOLATILE (current_function_decl
) = 1;
1741 if (node
->frequency
> NODE_FREQUENCY_EXECUTED_ONCE
)
1742 node
->frequency
= NODE_FREQUENCY_EXECUTED_ONCE
;
1747 switch (l
->pure_const_state
)
1750 if (!TREE_READONLY (current_function_decl
))
1752 warn_function_const (current_function_decl
, !l
->looping
);
1755 node
->set_const_flag (true, l
->looping
);
1759 fprintf (dump_file
, "Function found to be %sconst: %s\n",
1760 l
->looping
? "looping " : "",
1761 current_function_name ());
1763 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl
)
1768 node
->set_const_flag (true, false);
1772 fprintf (dump_file
, "Function found to be non-looping: %s\n",
1773 current_function_name ());
1778 if (!DECL_PURE_P (current_function_decl
))
1782 node
->set_pure_flag (true, l
->looping
);
1785 warn_function_pure (current_function_decl
, !l
->looping
);
1787 fprintf (dump_file
, "Function found to be %spure: %s\n",
1788 l
->looping
? "looping " : "",
1789 current_function_name ());
1791 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl
)
1796 node
->set_pure_flag (true, false);
1800 fprintf (dump_file
, "Function found to be non-looping: %s\n",
1801 current_function_name ());
1808 if (!l
->can_throw
&& !TREE_NOTHROW (current_function_decl
))
1810 node
->set_nothrow_flag (true);
1813 fprintf (dump_file
, "Function found to be nothrow: %s\n",
1814 current_function_name ());
1818 return execute_fixup_cfg ();
1826 make_pass_local_pure_const (gcc::context
*ctxt
)
1828 return new pass_local_pure_const (ctxt
);
1831 /* Emit noreturn warnings. */
1835 const pass_data pass_data_warn_function_noreturn
=
1837 GIMPLE_PASS
, /* type */
1838 "*warn_function_noreturn", /* name */
1839 OPTGROUP_NONE
, /* optinfo_flags */
1840 TV_NONE
, /* tv_id */
1841 PROP_cfg
, /* properties_required */
1842 0, /* properties_provided */
1843 0, /* properties_destroyed */
1844 0, /* todo_flags_start */
1845 0, /* todo_flags_finish */
1848 class pass_warn_function_noreturn
: public gimple_opt_pass
1851 pass_warn_function_noreturn (gcc::context
*ctxt
)
1852 : gimple_opt_pass (pass_data_warn_function_noreturn
, ctxt
)
1855 /* opt_pass methods: */
1856 virtual bool gate (function
*) { return warn_suggest_attribute_noreturn
; }
1857 virtual unsigned int execute (function
*fun
)
1859 if (!TREE_THIS_VOLATILE (current_function_decl
)
1860 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun
)->preds
) == 0)
1861 warn_function_noreturn (current_function_decl
);
1865 }; // class pass_warn_function_noreturn
1870 make_pass_warn_function_noreturn (gcc::context
*ctxt
)
1872 return new pass_warn_function_noreturn (ctxt
);
1875 /* Simple local pass for pure const discovery reusing the analysis from
1876 ipa_pure_const. This pass is effective when executed together with
1877 other optimization passes in early optimization pass queue. */
1881 const pass_data pass_data_nothrow
=
1883 GIMPLE_PASS
, /* type */
1884 "nothrow", /* name */
1885 OPTGROUP_NONE
, /* optinfo_flags */
1886 TV_IPA_PURE_CONST
, /* tv_id */
1887 0, /* properties_required */
1888 0, /* properties_provided */
1889 0, /* properties_destroyed */
1890 0, /* todo_flags_start */
1891 0, /* todo_flags_finish */
1894 class pass_nothrow
: public gimple_opt_pass
1897 pass_nothrow (gcc::context
*ctxt
)
1898 : gimple_opt_pass (pass_data_nothrow
, ctxt
)
1901 /* opt_pass methods: */
1902 opt_pass
* clone () { return new pass_nothrow (m_ctxt
); }
1903 virtual bool gate (function
*) { return optimize
; }
1904 virtual unsigned int execute (function
*);
1906 }; // class pass_nothrow
1909 pass_nothrow::execute (function
*)
1911 struct cgraph_node
*node
;
1912 basic_block this_block
;
1914 if (TREE_NOTHROW (current_function_decl
))
1917 node
= cgraph_node::get (current_function_decl
);
1919 /* We run during lowering, we can not really use availability yet. */
1920 if (cgraph_node::get (current_function_decl
)->get_availability ()
1921 <= AVAIL_INTERPOSABLE
)
1924 fprintf (dump_file
, "Function is interposable;"
1925 " not analyzing.\n");
1929 FOR_EACH_BB_FN (this_block
, cfun
)
1931 for (gimple_stmt_iterator gsi
= gsi_start_bb (this_block
);
1934 if (stmt_can_throw_external (gsi_stmt (gsi
)))
1936 if (is_gimple_call (gsi_stmt (gsi
)))
1938 tree callee_t
= gimple_call_fndecl (gsi_stmt (gsi
));
1939 if (callee_t
&& recursive_call_p (current_function_decl
,
1946 fprintf (dump_file
, "Statement can throw: ");
1947 print_gimple_stmt (dump_file
, gsi_stmt (gsi
), 0, 0);
1953 node
->set_nothrow_flag (true);
1955 fprintf (dump_file
, "Function found to be nothrow: %s\n",
1956 current_function_name ());
1963 make_pass_nothrow (gcc::context
*ctxt
)
1965 return new pass_nothrow (ctxt
);