/* Callgraph based analysis of static variables.
   Copyright (C) 2004, 2005, 2007, 2008, 2009 Free Software Foundation, Inc.
   Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file marks functions as being either const (TREE_READONLY) or
   pure (DECL_PURE_P).  It can also set a variant of these that
   are allowed to loop indefinitely (DECL_LOOPING_CONST_OR_PURE_P).

   This must be run after inlining decisions have been made since
   otherwise the local sets will not contain information that is
   consistent with the post-inlined state.  The global sets are not
   prone to this problem since they are by definition transitive.  */
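
/* An illustrative example (not part of the original pass sources): a
   function such as

     int square (int i) { return i * i; }

   depends only on its arguments and touches no memory, so it can become
   const (TREE_READONLY), while

     int read_counter (void) { return counter; }

   reads but never writes global memory (`counter' being a hypothetical
   global variable), so the best it can be marked is pure (DECL_PURE_P).  */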

/* The code in this module is called by the ipa pass manager.  It
   should be one of the later passes since its information is used by
   the rest of the compilation.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "pointer-set.h"
#include "ipa-utils.h"
#include "gimple.h"
#include "cgraph.h"
#include "flags.h"
#include "timevar.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "lto-streamer.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"

static struct pointer_set_t *visited_nodes;

/* Lattice values for const and pure functions.  Everything starts out
   being const, then may drop to pure and then neither depending on
   what is found.  */
enum pure_const_state_e
{
  IPA_CONST,
  IPA_PURE,
  IPA_NEITHER
};

/* Holder for the const_state.  There is one of these per function
   decl.  */
struct funct_state_d
{
  /* See above.  */
  enum pure_const_state_e pure_const_state;
  /* What the user set here; we can always be sure about this.  */
  enum pure_const_state_e state_previously_known;
  bool looping_previously_known;

  /* True if the function could possibly loop forever.  There are a
     lot of ways that this could be determined.  We are pretty
     conservative here.  While it is possible to CSE pure and const
     calls, it is not legal to have DCE get rid of the call if there
     is a possibility that the call could loop forever since this is
     a behavioral change.  */
  bool looping;

  bool can_throw;
};

typedef struct funct_state_d * funct_state;
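
/* For exposition only: the lattice enumerators above are ordered
   IPA_CONST < IPA_PURE < IPA_NEITHER, so combining two states is just a
   numeric maximum.  A hypothetical helper (the pass itself open-codes
   these comparisons) would be:

     static inline enum pure_const_state_e
     worse_state (enum pure_const_state_e a, enum pure_const_state_e b)
     {
       return a > b ? a : b;
     }
*/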

/* The storage of the funct_state is abstracted because there is the
   possibility that it may be desirable to move this to the cgraph
   local info.  */

/* Array, indexed by cgraph node uid, of function states.  */

DEF_VEC_P (funct_state);
DEF_VEC_ALLOC_P (funct_state, heap);
static VEC (funct_state, heap) *funct_state_vec;

/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;

/* Init the function state.  */

  free (funct_state_vec);

/* Return the function state from NODE.  */

static inline funct_state
get_function_state (struct cgraph_node *node)
{
  if (!funct_state_vec
      || VEC_length (funct_state, funct_state_vec) <= (unsigned int) node->uid)
    return NULL;
  return VEC_index (funct_state, funct_state_vec, node->uid);
}

/* Set the function state S for NODE.  */

static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
  if (!funct_state_vec
      || VEC_length (funct_state, funct_state_vec) <= (unsigned int) node->uid)
    VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
  VEC_replace (funct_state, funct_state_vec, node->uid, s);
}

/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

check_decl (funct_state local,
            tree t, bool checking_write)
  /* Do not want to do anything with volatile except mark any
     function that uses one to be not const or pure.  */
  if (TREE_THIS_VOLATILE (t))
      local->pure_const_state = IPA_NEITHER;
      fprintf (dump_file, " Volatile operand is not const/pure");

  /* Do not care about a local automatic that is not static.  */
  if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))

  /* If the variable has the "used" attribute, treat it as if it had
     been touched by the devil.  */
  if (lookup_attribute ("used", DECL_ATTRIBUTES (t)))
      local->pure_const_state = IPA_NEITHER;
      fprintf (dump_file, " Used static/global variable is not const/pure\n");

  /* Since we have dealt with the locals and params cases above, if we
     are CHECKING_WRITE, this cannot be a pure or constant
     function.  */
      local->pure_const_state = IPA_NEITHER;
      fprintf (dump_file, " static/global memory write is not const/pure\n");

  if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
      /* Readonly reads are safe.  */
      if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
        return; /* Read of a constant, do not change the function state.  */

          fprintf (dump_file, " global memory read is not const\n");
          /* Just a regular read.  */
          if (local->pure_const_state == IPA_CONST)
            local->pure_const_state = IPA_PURE;

      /* Compilation level statics can be read if they are readonly
         variables.  */
      if (TREE_READONLY (t))

          fprintf (dump_file, " static memory read is not const\n");
          /* Just a regular read.  */
          if (local->pure_const_state == IPA_CONST)
            local->pure_const_state = IPA_PURE;

/* Check to see if the use (or definition when CHECKING_WRITE is true)
   of variable T is legal in a function that is either pure or const.  */

check_op (funct_state local, tree t, bool checking_write)
  t = get_base_address (t);
  if (t && TREE_THIS_VOLATILE (t))
      local->pure_const_state = IPA_NEITHER;
      fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
           && INDIRECT_REF_P (t)
           && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
           && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
      fprintf (dump_file, " Indirect ref to local memory is OK\n");
  else if (checking_write)
      local->pure_const_state = IPA_NEITHER;
      fprintf (dump_file, " Indirect ref write is not const/pure\n");
      fprintf (dump_file, " Indirect ref read is not const\n");
      if (local->pure_const_state == IPA_CONST)
        local->pure_const_state = IPA_PURE;

/* Check the parameters of a function call to CALL to see if
   there are any references in the parameters that are not allowed for
   pure or const functions.  Also check to see if this is either an
   indirect call, a call outside the compilation unit, or has special
   attributes that may also affect its purity.  CALL is the call
   statement being checked.  */
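
/* For exposition only (hypothetical code, not from the original sources):
   given

     extern int foo (int);
     int bar (int (*fp) (int), int x) { return fp (x) + foo (x); }

   the indirect call through FP has no known fndecl, so unless its call
   flags say ECF_CONST or ECF_PURE the local analysis must fall to
   IPA_NEITHER, while the direct call to foo is either resolved from the
   callee's known flags or left to the IPA propagation pass below.  */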
check_call (funct_state local, gimple call, bool ipa)
  int flags = gimple_call_flags (call);
  tree callee_t = gimple_call_fndecl (call);
  struct cgraph_node *callee;
  enum availability avail = AVAIL_NOT_AVAILABLE;
  bool possibly_throws = stmt_could_throw_p (call);
  bool possibly_throws_externally = (possibly_throws
                                     && stmt_can_throw_external (call));
  for (i = 0; i < gimple_num_ops (call); i++)
    if (gimple_op (call, i)
        && tree_could_throw_p (gimple_op (call, i)))
        if (possibly_throws && flag_non_call_exceptions)
            fprintf (dump_file, " operand can throw; looping\n");
            local->looping = true;
        if (possibly_throws_externally)
            fprintf (dump_file, " operand can throw externally\n");
            local->can_throw = true;

  /* The const and pure flags are set by a variety of places in the
     compiler (including here).  If someone has already set the flags
     for the callee (such as for some of the builtins) we will use
     them, otherwise we will compute our own information.

     Const and pure functions have fewer clobber effects than other
     functions so we process these first.  Otherwise if it is a call
     outside the compilation unit or an indirect call we punt.  This
     leaves local calls which will be processed by following the call
     graph.  */
  callee = cgraph_node (callee_t);
  avail = cgraph_function_body_availability (callee);

  /* When bad things happen to bad functions, they cannot be const
     or pure.  */
  if (setjmp_call_p (callee_t))
      fprintf (dump_file, " setjmp is not const/pure\n");
      local->looping = true;
      local->pure_const_state = IPA_NEITHER;

  if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee_t))
      case BUILT_IN_LONGJMP:
      case BUILT_IN_NONLOCAL_GOTO:
        fprintf (dump_file, " longjmp and nonlocal goto are not const/pure\n");
        local->pure_const_state = IPA_NEITHER;
        local->looping = true;

  /* When not in IPA mode, we can still handle self recursion.  */
  if (!ipa && callee_t == current_function_decl)
    local->looping = true;
  /* Either the callee is unknown or we are doing local analysis.
     Look to see if there are any bits available for the callee (such as by
     declaration or because it is builtin) and process solely on the basis
     of that information.  */
  else if (!ipa || !callee_t)
      if (possibly_throws && flag_non_call_exceptions)
          fprintf (dump_file, " can throw; looping\n");
          local->looping = true;
      if (possibly_throws_externally)
          fprintf (dump_file, " can throw externally to lp %i\n",
                   lookup_stmt_eh_lp (call));
            fprintf (dump_file, " callee:%s\n",
                     IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
          local->can_throw = true;
      if (flags & ECF_CONST)
          if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
            local->looping = true;
      else if (flags & ECF_PURE)
          if (callee_t && DECL_LOOPING_CONST_OR_PURE_P (callee_t))
            local->looping = true;
          fprintf (dump_file, " pure function call is not const\n");
          if (local->pure_const_state == IPA_CONST)
            local->pure_const_state = IPA_PURE;
          fprintf (dump_file, " unknown function call is not const/pure\n");
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;
  /* Direct function calls are handled by IPA propagation.  */

/* Wrapper around check_decl for loads.  */

check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
    check_decl ((funct_state) data, op, false);
    check_op ((funct_state) data, op, false);

/* Wrapper around check_decl for stores.  */

check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
    check_decl ((funct_state) data, op, true);
    check_op ((funct_state) data, op, true);

/* Look into pointer pointed to by GSIP and figure out what interesting
   side effects it has.  */

check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
  gimple stmt = gsi_stmt (*gsip);

  if (is_gimple_debug (stmt))

      fprintf (dump_file, " scanning: ");
      print_gimple_stmt (dump_file, stmt, 0, 0);

  /* Look for loads and stores.  */
  walk_stmt_load_store_ops (stmt, local, check_load, check_store);

  if (gimple_code (stmt) != GIMPLE_CALL
      && stmt_could_throw_p (stmt))
      if (flag_non_call_exceptions)
          fprintf (dump_file, " can throw; looping");
          local->looping = true;
      if (stmt_can_throw_external (stmt))
          fprintf (dump_file, " can throw externally");
          local->can_throw = true;

  switch (gimple_code (stmt))
      check_call (local, stmt, ipa);
      if (DECL_NONLOCAL (gimple_label_label (stmt)))
        /* Target of long jump.  */
          fprintf (dump_file, " nonlocal label is not const/pure");
          local->pure_const_state = IPA_NEITHER;
      for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
          tree op = gimple_asm_clobber_op (stmt, i);
          if (simple_cst_equal (TREE_VALUE (op), memory_identifier_string) == 1)
              fprintf (dump_file, " memory asm clobber is not const/pure");
              /* Abandon all hope, ye who enter here.  */
              local->pure_const_state = IPA_NEITHER;
      if (gimple_asm_volatile_p (stmt))
          fprintf (dump_file, " volatile is not const/pure");
          /* Abandon all hope, ye who enter here.  */
          local->pure_const_state = IPA_NEITHER;
          local->looping = true;

/* This is the main routine for finding the reference patterns for
   global variables within a function FN.  */

analyze_function (struct cgraph_node *fn, bool ipa)
  tree decl = fn->decl;
  tree old_decl = current_function_decl;
  basic_block this_block;

  l = XCNEW (struct funct_state_d);
  l->pure_const_state = IPA_CONST;
  l->state_previously_known = IPA_NEITHER;
  l->looping_previously_known = true;
  l->can_throw = false;

    fprintf (dump_file, "\n\n local analysis of %s\n ",
             cgraph_node_name (fn));

  push_cfun (DECL_STRUCT_FUNCTION (decl));
  current_function_decl = decl;

  FOR_EACH_BB (this_block)
      gimple_stmt_iterator gsi;
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      for (gsi = gsi_start_bb (this_block);
          check_stmt (&gsi, l, ipa);
          if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
  if (l->pure_const_state != IPA_NEITHER)
      /* Const functions cannot have back edges (an
         indication of possible infinite loop side
         effects).  */
      if (mark_dfs_back_edges ())
          /* Preheaders are needed for SCEV to work.
             Simple latches and recorded exits improve chances that the loop
             will be proved finite in testcases such as loop-15.c and
             loop-24.c.  */
          loop_optimizer_init (LOOPS_NORMAL
                               | LOOPS_HAVE_RECORDED_EXITS);
          if (dump_file && (dump_flags & TDF_DETAILS))
            flow_loops_dump (dump_file, NULL, 0);
          if (mark_irreducible_loops ())
              fprintf (dump_file, " has irreducible loops\n");
          FOR_EACH_LOOP (li, loop, 0)
            if (!finite_loop_p (loop))
                fprintf (dump_file, " cannot prove finiteness of loop %i\n",
                         loop->num);
          loop_optimizer_finalize ();
  if (TREE_READONLY (decl))
      l->pure_const_state = IPA_CONST;
      l->state_previously_known = IPA_CONST;
      if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
        l->looping = false, l->looping_previously_known = false;
  if (DECL_PURE_P (decl))
      if (l->pure_const_state != IPA_CONST)
        l->pure_const_state = IPA_PURE;
      l->state_previously_known = IPA_PURE;
      if (!DECL_LOOPING_CONST_OR_PURE_P (decl))
        l->looping = false, l->looping_previously_known = false;
  if (TREE_NOTHROW (decl))
    l->can_throw = false;

  current_function_decl = old_decl;

      fprintf (dump_file, "Function is locally looping.\n");
      fprintf (dump_file, "Function is locally throwing.\n");
      if (l->pure_const_state == IPA_CONST)
        fprintf (dump_file, "Function is locally const.\n");
      if (l->pure_const_state == IPA_PURE)
        fprintf (dump_file, "Function is locally pure.\n");

/* Called when a new function is inserted into the callgraph late.  */

add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
  if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();
  set_function_state (node, analyze_function (node, true));
  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;

/* Called when a new clone is inserted into the callgraph late.  */

duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
                     void *data ATTRIBUTE_UNUSED)
  if (get_function_state (src))
      funct_state l = XNEW (struct funct_state_d);
      gcc_assert (!get_function_state (dst));
      memcpy (l, get_function_state (src), sizeof (*l));
      set_function_state (dst, l);

/* Called when a cgraph node is removed.  */

remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
  if (get_function_state (node))
      free (get_function_state (node));
      set_function_state (node, NULL);

register_hooks (void)
  static bool init_p = false;

  node_removal_hook_holder =
      cgraph_add_node_removal_hook (&remove_node_data, NULL);
  node_duplication_hook_holder =
      cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
  function_insertion_hook_holder =
      cgraph_add_function_insertion_hook (&add_new_function, NULL);

/* Analyze each function in the cgraph to see if it is locally PURE or
   CONST or can throw.  */

generate_summary (void)
  struct cgraph_node *node;

  /* There are some shared nodes, in particular the initializers on
     static declarations.  We do not need to scan them more than once
     since all we would be interested in are the addressof
     operations.  */
  visited_nodes = pointer_set_create ();

  /* Process all of the functions.

     We process AVAIL_OVERWRITABLE functions.  We cannot use the results
     by default, but the info can be used at LTO with -fwhole-program or
     when a function got cloned and the clone is AVAILABLE.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
      set_function_state (node, analyze_function (node, true));

  pointer_set_destroy (visited_nodes);
  visited_nodes = NULL;

/* Serialize the ipa info for lto.  */
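
/* Sketch of the section layout produced below (derived from the code,
   not a separate format specification): a uleb128 count of analyzed
   nodes, then for each node a uleb128 node reference followed by one
   bitpack:

     pure_const_state           2 bits
     state_previously_known     2 bits
     looping_previously_known   1 bit
     looping                    1 bit
     can_throw                  1 bit  */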

pure_const_write_summary (cgraph_node_set set)
  struct cgraph_node *node;
  struct lto_simple_output_block *ob
    = lto_create_simple_output_block (LTO_section_ipa_pure_const);
  unsigned int count = 0;
  cgraph_node_set_iterator csi;

  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
      node = csi_node (csi);
      if (node->analyzed && get_function_state (node) != NULL)

  lto_output_uleb128_stream (ob->main_stream, count);

  /* Process all of the functions.  */
  for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
      node = csi_node (csi);
      if (node->analyzed && get_function_state (node) != NULL)
          struct bitpack_d *bp;
          lto_cgraph_encoder_t encoder;

          fs = get_function_state (node);

          encoder = ob->decl_state->cgraph_node_encoder;
          node_ref = lto_cgraph_encoder_encode (encoder, node);
          lto_output_uleb128_stream (ob->main_stream, node_ref);

          /* Note that flags will need to be read in the opposite
             order as we are pushing the bitflags into FLAGS.  */
          bp = bitpack_create ();
          bp_pack_value (bp, fs->pure_const_state, 2);
          bp_pack_value (bp, fs->state_previously_known, 2);
          bp_pack_value (bp, fs->looping_previously_known, 1);
          bp_pack_value (bp, fs->looping, 1);
          bp_pack_value (bp, fs->can_throw, 1);
          lto_output_bitpack (ob->main_stream, bp);

  lto_destroy_simple_output_block (ob);

/* Deserialize the ipa info for lto.  */

pure_const_read_summary (void)
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;

  while ((file_data = file_data_vec[j++]))
      struct lto_input_block *ib
        = lto_create_simple_input_block (file_data,
                                         LTO_section_ipa_pure_const,
      unsigned int count = lto_input_uleb128 (ib);
          for (i = 0; i < count; i++)
              struct cgraph_node *node;
              struct bitpack_d *bp;
              lto_cgraph_encoder_t encoder;

              fs = XCNEW (struct funct_state_d);
              index = lto_input_uleb128 (ib);
              encoder = file_data->cgraph_node_encoder;
              node = lto_cgraph_encoder_deref (encoder, index);
              set_function_state (node, fs);

              /* Note that the flags must be read in the opposite
                 order from that in which they were written (the
                 bitflags were pushed into FLAGS).  */
              bp = lto_input_bitpack (ib);
              fs->pure_const_state
                = (enum pure_const_state_e) bp_unpack_value (bp, 2);
              fs->state_previously_known
                = (enum pure_const_state_e) bp_unpack_value (bp, 2);
              fs->looping_previously_known = bp_unpack_value (bp, 1);
              fs->looping = bp_unpack_value (bp, 1);
              fs->can_throw = bp_unpack_value (bp, 1);

          lto_destroy_simple_input_block (file_data,
                                          LTO_section_ipa_pure_const,

ignore_edge (struct cgraph_edge *e)
  return (!e->can_throw_external);

/* Return true if NODE is a self-recursive function.  */

self_recursive_p (struct cgraph_node *node)
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee == node)

/* Produce the global information by performing a transitive closure
   on the local information that was produced by generate_summary.
   Note that there is no function_transform pass since this only
   updates the function_decl.  */

  struct cgraph_node *node;
  struct cgraph_node *w;
  struct cgraph_node **order =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  struct ipa_dfs_info * w_info;

  cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
  cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
  cgraph_remove_node_removal_hook (node_removal_hook_holder);
  order_pos = ipa_utils_reduced_inorder (order, true, false, NULL);

      dump_cgraph (dump_file);
      ipa_utils_print_order (dump_file, "reduced", order, order_pos);

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
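
  /* For exposition only: if a const function calls a pure one, the
     caller can be no better than pure; if any function in a cycle calls
     something unknown (IPA_NEITHER), every node of that cycle becomes
     IPA_NEITHER, because all members of a strongly connected region
     share the worst state found among them and their callees.  */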
  for (i = 0; i < order_pos; i++)
      enum pure_const_state_e pure_const_state = IPA_CONST;
      bool looping = false;

      /* Find the worst state for any node in the cycle.  */
          struct cgraph_edge *e;
          funct_state w_l = get_function_state (w);
          if (pure_const_state < w_l->pure_const_state)
            pure_const_state = w_l->pure_const_state;

          if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
              looping |= w_l->looping_previously_known;
              if (pure_const_state < w_l->state_previously_known)
                pure_const_state = w_l->state_previously_known;

          if (pure_const_state == IPA_NEITHER)

          for (e = w->callees; e; e = e->next_callee)
              struct cgraph_node *y = e->callee;

              if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
                  funct_state y_l = get_function_state (y);
                  if (pure_const_state < y_l->pure_const_state)
                    pure_const_state = y_l->pure_const_state;
                  if (pure_const_state == IPA_NEITHER)
                  int flags = flags_from_decl_or_type (y->decl);
                  if (flags & ECF_LOOPING_CONST_OR_PURE)
                  if (flags & ECF_CONST)
                  else if ((flags & ECF_PURE) && pure_const_state == IPA_CONST)
                    pure_const_state = IPA_PURE;
                    pure_const_state = IPA_NEITHER, looping = true;
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;

      /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
          funct_state w_l = get_function_state (w);
          enum pure_const_state_e this_state = pure_const_state;
          bool this_looping = looping;

          if (w_l->state_previously_known != IPA_NEITHER
              && this_state > w_l->state_previously_known)
            this_state = w_l->state_previously_known;
          if (!this_looping && self_recursive_p (w))
            if (!w_l->looping_previously_known)
              this_looping = false;

          /* All nodes within a cycle share the same info.  */
          w_l->pure_const_state = this_state;
          w_l->looping = this_looping;

              if (!TREE_READONLY (w->decl) && dump_file)
                fprintf (dump_file, "Function found to be %sconst: %s\n",
                         this_looping ? "looping " : "",
                         cgraph_node_name (w));
              TREE_READONLY (w->decl) = 1;
              DECL_LOOPING_CONST_OR_PURE_P (w->decl) = this_looping;

              if (!DECL_PURE_P (w->decl) && dump_file)
                fprintf (dump_file, "Function found to be %spure: %s\n",
                         this_looping ? "looping " : "",
                         cgraph_node_name (w));
              DECL_PURE_P (w->decl) = 1;
              DECL_LOOPING_CONST_OR_PURE_P (w->decl) = this_looping;

          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;

  for (node = cgraph_nodes; node; node = node->next)
      /* Get rid of the aux information.  */
      w_info = (struct ipa_dfs_info *) node->aux;

  order_pos = ipa_utils_reduced_inorder (order, true, false, ignore_edge);
      dump_cgraph (dump_file);
      ipa_utils_print_order (dump_file, "reduced for nothrow", order,
                             order_pos);

  /* Propagate the local information through the call graph to produce
     the global information.  All the nodes within a cycle will have
     the same info so we collapse cycles first.  Then we can do the
     propagation in one pass from the leaves to the roots.  */
  for (i = 0; i < order_pos; i++)
      bool can_throw = false;

      /* Find the worst state for any node in the cycle.  */
          struct cgraph_edge *e;
          funct_state w_l = get_function_state (w);
              || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)

          for (e = w->callees; e; e = e->next_callee)
              struct cgraph_node *y = e->callee;

              if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
                  funct_state y_l = get_function_state (y);

                  if (y_l->can_throw && !TREE_NOTHROW (w->decl)
                      && e->can_throw_external)
              else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;

      /* Copy back the region's pure_const_state which is shared by
         all nodes in the region.  */
          funct_state w_l = get_function_state (w);
          if (!can_throw && !TREE_NOTHROW (w->decl))
              struct cgraph_edge *e;
              TREE_NOTHROW (w->decl) = true;
              for (e = w->callers; e; e = e->next_caller)
                e->can_throw_external = false;
                fprintf (dump_file, "Function found to be nothrow: %s\n",
                         cgraph_node_name (w));
          else if (can_throw && !TREE_NOTHROW (w->decl))
            w_l->can_throw = true;
          w_info = (struct ipa_dfs_info *) w->aux;
          w = w_info->next_cycle;

  for (node = cgraph_nodes; node; node = node->next)
      /* Get rid of the aux information.  */
      w_info = (struct ipa_dfs_info *) node->aux;
      if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
        free (get_function_state (node));

  VEC_free (funct_state, heap, funct_state_vec);

gate_pure_const (void)
  return (flag_ipa_pure_const
          /* Don't bother doing anything if the program has errors.  */
          && !(errorcount || sorrycount));

struct ipa_opt_pass_d pass_ipa_pure_const =
  "pure-const",                         /* name */
  gate_pure_const,                      /* gate */
  propagate,                            /* execute */
  0,                                    /* static_pass_number */
  TV_IPA_PURE_CONST,                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
  generate_summary,                     /* generate_summary */
  pure_const_write_summary,             /* write_summary */
  pure_const_read_summary,              /* read_summary */
  NULL,                                 /* function_read_summary */
  NULL,                                 /* function_transform */
  NULL                                  /* variable_transform */

/* Simple local pass for pure const discovery reusing the analysis from
   ipa_pure_const.  This pass is effective when executed together with
   other optimization passes in the early optimization pass queue.  */

local_pure_const (void)
  bool changed = false;

  /* Because we do not schedule pass_fixup_cfg over the whole program
     after early optimizations we must not promote functions that are
     called by already processed functions.  */
  if (function_called_by_processed_nodes_p ())
      fprintf (dump_file, "Function called in recursive cycle; ignoring\n");

  if (cgraph_function_body_availability (cgraph_node (current_function_decl))
      <= AVAIL_OVERWRITABLE)
      fprintf (dump_file, "Function has wrong visibility; ignoring\n");

  l = analyze_function (cgraph_node (current_function_decl), false);

  switch (l->pure_const_state)
      if (!TREE_READONLY (current_function_decl))
          TREE_READONLY (current_function_decl) = 1;
          DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = l->looping;
            fprintf (dump_file, "Function found to be %sconst: %s\n",
                     l->looping ? "looping " : "",
                     lang_hooks.decl_printable_name (current_function_decl,
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
          DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = false;
            fprintf (dump_file, "Function found to be non-looping: %s\n",
                     lang_hooks.decl_printable_name (current_function_decl,
      if (!TREE_READONLY (current_function_decl))
          DECL_PURE_P (current_function_decl) = 1;
          DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = l->looping;
            fprintf (dump_file, "Function found to be %spure: %s\n",
                     l->looping ? "looping " : "",
                     lang_hooks.decl_printable_name (current_function_decl,
      else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
          DECL_LOOPING_CONST_OR_PURE_P (current_function_decl) = false;
            fprintf (dump_file, "Function found to be non-looping: %s\n",
                     lang_hooks.decl_printable_name (current_function_decl,

  if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
      struct cgraph_edge *e;

      TREE_NOTHROW (current_function_decl) = true;
      for (e = cgraph_node (current_function_decl)->callers;
           e; e = e->next_caller)
        e->can_throw_external = false;
        fprintf (dump_file, "Function found to be nothrow: %s\n",
                 lang_hooks.decl_printable_name (current_function_decl,

    return execute_fixup_cfg ();

struct gimple_opt_pass pass_local_pure_const =
  "local-pure-const",                   /* name */
  gate_pure_const,                      /* gate */
  local_pure_const,                     /* execute */
  0,                                    /* static_pass_number */
  TV_IPA_PURE_CONST,                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */