/* Convert a program in SSA form into Normal form.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.
   Contributed by Andrew Macleod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "expr.h"
#include "tree-ssa-live.h"
#include "tree-ssa-ter.h"
#include "tree-ssa-coalesce.h"
#include "tree-outof-ssa.h"

/* FIXME: A lot of code here deals with expanding to RTL.  All that code
   should be in cfgexpand.c.  */

/* Return TRUE if expression STMT is suitable for replacement.  */

bool
ssa_is_replaceable_p (gimple *stmt)
{
  use_operand_p use_p;
  tree def;
  gimple *use_stmt;

  /* Only consider modify stmts.  */
  if (!is_gimple_assign (stmt))
    return false;

  /* If the statement may throw an exception, it cannot be replaced.  */
  if (stmt_could_throw_p (cfun, stmt))
    return false;

  /* Punt if there is more than 1 def.  */
  def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF);
  if (!def)
    return false;

  /* Only consider definitions which have a single use.  */
  if (!single_imm_use (def, &use_p, &use_stmt))
    return false;

  /* Used in this block, but at the TOP of the block, not the end.  */
  if (gimple_code (use_stmt) == GIMPLE_PHI)
    return false;

  /* There must be no VDEFs.  */
  if (gimple_vdef (stmt))
    return false;

  /* Float expressions must go through memory if float-store is on.  */
  if (flag_float_store
      && FLOAT_TYPE_P (gimple_expr_type (stmt)))
    return false;

  /* An assignment with a register variable on the RHS is not
     replaceable.  */
  if (gimple_assign_rhs_code (stmt) == VAR_DECL
      && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt)))
    return false;

  /* No function calls can be replaced.  */
  if (is_gimple_call (stmt))
    return false;

  /* Leave any stmt with volatile operands alone as well.  */
  if (gimple_has_volatile_ops (stmt))
    return false;

  return true;
}
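
/* For illustration only (not part of the original sources): given

     x_3 = a_1 + b_2;
     foo (x_3);

   the definition of x_3 is a plain assignment with a single use, no
   virtual defs and no volatile operands, so it satisfies the tests above
   and TER may forward the expression into the call.  A definition
   feeding a PHI node, or one that may throw, is rejected.  */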

/* Used to hold all the components required to do SSA PHI elimination.
   The node and pred/succ list is a simple linear list of nodes and
   edges represented as pairs of nodes.

   The predecessor and successor list:  Nodes are entered in pairs, where
   [0] ->PRED, [1]->SUCC.  All the even indexes in the array represent
   predecessors, all the odd elements are successors.

   Rationale:
   When this was implemented with bitmaps, the SSA->Normal time for very
   large programs was dominated by clearing the interference graph.

   Typically this list of edges is extremely small since it only includes
   PHI results and uses from a single edge which have not coalesced with
   each other.  This means that no virtual PHI nodes are included, and
   empirical evidence suggests that the number of edges rarely exceeds
   3, and in a bootstrap of GCC, the maximum size encountered was 7.
   This also limits the number of possible nodes that are involved to
   rarely more than 6, and in the bootstrap of GCC, the maximum number
   of nodes encountered was 12.  */
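
/* A small worked example of the layout described above (illustrative
   only): if the PHI copies required on the incoming edge are
   P4 = P7 and P7 = P9, then roughly

     nodes      = [ 4, 7, 9 ]
     edge_list  = [ 4, 7,  7, 9 ]    even index = PRED, odd index = SUCC
     edge_locus = [ locus0, locus1 ]

   with one location entry per PRED/SUCC pair.  */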

class elim_graph
{
public:
  elim_graph (var_map map);

  /* Size of the elimination vectors.  */
  int size;

  /* List of nodes in the elimination graph.  */
  auto_vec<int> nodes;

  /* The predecessor and successor edge list.  */
  auto_vec<int> edge_list;

  /* Source locus on each edge.  */
  auto_vec<location_t> edge_locus;

  /* Visited vector.  */
  auto_sbitmap visited;

  /* Stack for visited nodes.  */
  auto_vec<int> stack;

  /* The variable partition map.  */
  var_map map;

  /* Edge being eliminated by this graph.  */
  edge e;

  /* List of constant copies to emit.  These are pushed on in pairs.  */
  auto_vec<int> const_dests;
  auto_vec<tree> const_copies;

  /* Source locations for any constant copies.  */
  auto_vec<location_t> copy_locus;
};

/* For an edge E find out a good source location to associate with
   instructions inserted on edge E.  If E has an implicit goto set,
   use its location.  Otherwise search instructions in predecessors
   of E for a location, and use that one.  That makes sense because
   we insert on edges for PHI nodes, and effects of PHIs happen on
   the end of the predecessor conceptually.  */

static void
set_location_for_edge (edge e)
{
  if (e->goto_locus)
    set_curr_insn_location (e->goto_locus);
  else
    {
      basic_block bb = e->src;
      gimple_stmt_iterator gsi;

      do
        {
          for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
            {
              gimple *stmt = gsi_stmt (gsi);
              if (is_gimple_debug (stmt))
                continue;
              if (gimple_has_location (stmt) || gimple_block (stmt))
                {
                  set_curr_insn_location (gimple_location (stmt));
                  return;
                }
            }
          /* Nothing found in this basic block.  Make a half-assed attempt
             to continue with another block.  */
          if (single_pred_p (bb))
            bb = single_pred (bb);
          else
            bb = e->src;
        }
      while (bb != e->src);
    }
}

/* Emit insns to copy SRC into DEST converting SRC if necessary.  As
   SRC/DEST might be BLKmode memory locations SIZEEXP is a tree from
   which we deduce the size to copy in that case.  */

static inline rtx_insn *
emit_partition_copy (rtx dest, rtx src, int unsignedsrcp, tree sizeexp)
{
  start_sequence ();

  if (GET_MODE (src) != VOIDmode && GET_MODE (src) != GET_MODE (dest))
    src = convert_to_mode (GET_MODE (dest), src, unsignedsrcp);
  if (GET_MODE (src) == BLKmode)
    {
      gcc_assert (GET_MODE (dest) == BLKmode);
      emit_block_move (dest, src, expr_size (sizeexp), BLOCK_OP_NORMAL);
    }
  else
    emit_move_insn (dest, src);
  do_pending_stack_adjust ();

  rtx_insn *seq = get_insns ();
  end_sequence ();

  return seq;
}

/* Insert a copy instruction from partition SRC to DEST onto edge E.  */

static void
insert_partition_copy_on_edge (edge e, int dest, int src, location_t locus)
{
  tree var;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file,
               "Inserting a partition copy on edge BB%d->BB%d : "
               "PART.%d = PART.%d",
               e->src->index,
               e->dest->index, dest, src);
      fprintf (dump_file, "\n");
    }

  gcc_assert (SA.partition_to_pseudo[dest]);
  gcc_assert (SA.partition_to_pseudo[src]);

  set_location_for_edge (e);
  /* If a locus is provided, override the default.  */
  if (locus)
    set_curr_insn_location (locus);

  var = partition_to_var (SA.map, src);
  rtx_insn *seq = emit_partition_copy (copy_rtx (SA.partition_to_pseudo[dest]),
                                       copy_rtx (SA.partition_to_pseudo[src]),
                                       TYPE_UNSIGNED (TREE_TYPE (var)),
                                       var);

  insert_insn_on_edge (seq, e);
}

/* Insert a copy instruction from expression SRC to partition DEST
   onto edge E.  */

static void
insert_value_copy_on_edge (edge e, int dest, tree src, location_t locus)
{
  rtx dest_rtx, seq, x;
  machine_mode dest_mode, src_mode;
  int unsignedp;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file,
               "Inserting a value copy on edge BB%d->BB%d : PART.%d = ",
               e->src->index,
               e->dest->index, dest);
      print_generic_expr (dump_file, src, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  dest_rtx = copy_rtx (SA.partition_to_pseudo[dest]);
  gcc_assert (dest_rtx);

  set_location_for_edge (e);
  /* If a locus is provided, override the default.  */
  if (locus)
    set_curr_insn_location (locus);

  start_sequence ();

  tree name = partition_to_var (SA.map, dest);
  src_mode = TYPE_MODE (TREE_TYPE (src));
  dest_mode = GET_MODE (dest_rtx);
  gcc_assert (src_mode == TYPE_MODE (TREE_TYPE (name)));
  gcc_assert (!REG_P (dest_rtx)
              || dest_mode == promote_ssa_mode (name, &unsignedp));

  if (src_mode != dest_mode)
    {
      x = expand_expr (src, NULL, src_mode, EXPAND_NORMAL);
      x = convert_modes (dest_mode, src_mode, x, unsignedp);
    }
  else if (src_mode == BLKmode)
    {
      x = dest_rtx;
      store_expr (src, x, 0, false, false);
    }
  else
    x = expand_expr (src, dest_rtx, dest_mode, EXPAND_NORMAL);

  if (x != dest_rtx)
    emit_move_insn (dest_rtx, x);
  do_pending_stack_adjust ();

  seq = get_insns ();
  end_sequence ();

  insert_insn_on_edge (seq, e);
}

/* Insert a copy instruction from RTL expression SRC to partition DEST
   onto edge E.  */

static void
insert_rtx_to_part_on_edge (edge e, int dest, rtx src, int unsignedsrcp,
                            location_t locus)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file,
               "Inserting a temp copy on edge BB%d->BB%d : PART.%d = ",
               e->src->index,
               e->dest->index, dest);
      print_simple_rtl (dump_file, src);
      fprintf (dump_file, "\n");
    }

  gcc_assert (SA.partition_to_pseudo[dest]);

  set_location_for_edge (e);
  /* If a locus is provided, override the default.  */
  if (locus)
    set_curr_insn_location (locus);

  /* We give the destination as sizeexp in case src/dest are BLKmode
     mems.  Usually we give the source.  As these result from SSA names
     the left and right size should be the same (and no WITH_SIZE_EXPR
     involved), so it doesn't matter.  */
  rtx_insn *seq = emit_partition_copy (copy_rtx (SA.partition_to_pseudo[dest]),
                                       src, unsignedsrcp,
                                       partition_to_var (SA.map, dest));

  insert_insn_on_edge (seq, e);
}

/* Insert a copy instruction from partition SRC to RTL lvalue DEST
   onto edge E.  */

static void
insert_part_to_rtx_on_edge (edge e, rtx dest, int src, location_t locus)
{
  tree var;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file,
               "Inserting a temp copy on edge BB%d->BB%d : ",
               e->src->index, e->dest->index);
      print_simple_rtl (dump_file, dest);
      fprintf (dump_file, "= PART.%d\n", src);
    }

  gcc_assert (SA.partition_to_pseudo[src]);

  set_location_for_edge (e);
  /* If a locus is provided, override the default.  */
  if (locus)
    set_curr_insn_location (locus);

  var = partition_to_var (SA.map, src);
  rtx_insn *seq = emit_partition_copy (dest,
                                       copy_rtx (SA.partition_to_pseudo[src]),
                                       TYPE_UNSIGNED (TREE_TYPE (var)),
                                       var);

  insert_insn_on_edge (seq, e);
}

/* Create an elimination graph for map.  */

elim_graph::elim_graph (var_map map) :
  nodes (30), edge_list (20), edge_locus (10), visited (map->num_partitions),
  stack (30), map (map), const_dests (20), const_copies (20), copy_locus (10)
{
}

/* Empty elimination graph G.  */

static inline void
clear_elim_graph (elim_graph *g)
{
  g->nodes.truncate (0);
  g->edge_list.truncate (0);
  g->edge_locus.truncate (0);
}

/* Return the number of nodes in graph G.  */

static inline int
elim_graph_size (elim_graph *g)
{
  return g->nodes.length ();
}

/* Add NODE to graph G, if it doesn't exist already.  */

static inline void
elim_graph_add_node (elim_graph *g, int node)
{
  int x;
  int t;

  FOR_EACH_VEC_ELT (g->nodes, x, t)
    if (t == node)
      return;
  g->nodes.safe_push (node);
}

/* Add the edge PRED->SUCC to graph G.  */

static inline void
elim_graph_add_edge (elim_graph *g, int pred, int succ, location_t locus)
{
  g->edge_list.safe_push (pred);
  g->edge_list.safe_push (succ);
  g->edge_locus.safe_push (locus);
}

/* Remove an edge from graph G for which NODE is the predecessor, and
   return the successor node.  -1 is returned if there is no such edge.  */

static inline int
elim_graph_remove_succ_edge (elim_graph *g, int node, location_t *locus)
{
  int y;
  unsigned x;

  for (x = 0; x < g->edge_list.length (); x += 2)
    if (g->edge_list[x] == node)
      {
        g->edge_list[x] = -1;
        y = g->edge_list[x + 1];
        g->edge_list[x + 1] = -1;
        *locus = g->edge_locus[x / 2];
        g->edge_locus[x / 2] = UNKNOWN_LOCATION;
        return y;
      }
  *locus = UNKNOWN_LOCATION;
  return -1;
}

/* Find all the nodes in GRAPH which are successors to NODE in the
   edge list.  VAR will hold the partition number found.  CODE is the
   code fragment executed for every node found.  */

#define FOR_EACH_ELIM_GRAPH_SUCC(GRAPH, NODE, VAR, LOCUS, CODE)	\
do {								\
  unsigned x_;							\
  int y_;							\
  for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2)	\
    {								\
      y_ = (GRAPH)->edge_list[x_];				\
      if (y_ != (NODE))						\
        continue;						\
      (void) ((VAR) = (GRAPH)->edge_list[x_ + 1]);		\
      (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]);		\
      CODE;							\
    }								\
} while (0)

/* Find all the nodes which are predecessors of NODE in the edge list for
   GRAPH.  VAR will hold the partition number found.  CODE is the
   code fragment executed for every node found.  */

#define FOR_EACH_ELIM_GRAPH_PRED(GRAPH, NODE, VAR, LOCUS, CODE)	\
do {								\
  unsigned x_;							\
  int y_;							\
  for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2)	\
    {								\
      y_ = (GRAPH)->edge_list[x_ + 1];				\
      if (y_ != (NODE))						\
        continue;						\
      (void) ((VAR) = (GRAPH)->edge_list[x_]);			\
      (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]);		\
      CODE;							\
    }								\
} while (0)
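
/* Typical use of these iterators, shown only as an illustration (this is
   essentially what elim_forward below does):

     FOR_EACH_ELIM_GRAPH_SUCC (g, T, S, locus,
       {
         if (!bitmap_bit_p (g->visited, S))
           elim_forward (g, S);
       });

   VAR and LOCUS are assigned by the macro before CODE is executed for
   each matching PRED/SUCC pair.  */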

/* Add T to elimination graph G.  */

static inline void
eliminate_name (elim_graph *g, int T)
{
  elim_graph_add_node (g, T);
}

/* Return true if this phi argument T should have a copy queued when using
   var_map MAP.  PHI nodes should contain only ssa_names and invariants.  A
   test for ssa_name is definitely simpler, but don't let invalid contents
   slip through in the meantime.  */

static inline bool
queue_phi_copy_p (var_map map, tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (var_to_partition (map, t) == NO_PARTITION)
        return true;
      return false;
    }
  gcc_checking_assert (is_gimple_min_invariant (t));
  return true;
}

/* Build elimination graph G for basic block BB on incoming PHI edge
   G->e.  */

static void
eliminate_build (elim_graph *g)
{
  tree Ti;
  int p0, pi;
  gphi_iterator gsi;

  clear_elim_graph (g);

  for (gsi = gsi_start_phis (g->e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      location_t locus;

      p0 = var_to_partition (g->map, gimple_phi_result (phi));
      /* Ignore results which are not in partitions.  */
      if (p0 == NO_PARTITION)
        continue;

      Ti = PHI_ARG_DEF (phi, g->e->dest_idx);
      locus = gimple_phi_arg_location_from_edge (phi, g->e);

      /* If this argument is a constant, or a SSA_NAME which is being
         left in SSA form, just queue a copy to be emitted on this
         edge.  */
      if (queue_phi_copy_p (g->map, Ti))
        {
          /* Save constant copies until all other copies have been emitted
             on this edge.  */
          g->const_dests.safe_push (p0);
          g->const_copies.safe_push (Ti);
          g->copy_locus.safe_push (locus);
        }
      else
        {
          pi = var_to_partition (g->map, Ti);
          if (p0 != pi)
            {
              eliminate_name (g, p0);
              eliminate_name (g, pi);
              elim_graph_add_edge (g, p0, pi, locus);
            }
        }
    }
}
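
/* As an illustration (not from the original sources), for an incoming
   edge E with the PHIs

     a_5 = PHI <b_7 (E), ...>
     c_2 = PHI <5 (E), ...>

   the first argument adds the partitions of a_5 and b_7 as nodes plus an
   edge between them (the copy a = b to be emitted on E), provided the two
   did not coalesce into the same partition.  The constant 5 is instead
   queued on const_dests/const_copies and emitted last, by eliminate_phi.  */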

/* Push successors of T onto the elimination stack for G.  */

static void
elim_forward (elim_graph *g, int T)
{
  int S;
  location_t locus;

  bitmap_set_bit (g->visited, T);
  FOR_EACH_ELIM_GRAPH_SUCC (g, T, S, locus,
    {
      if (!bitmap_bit_p (g->visited, S))
        elim_forward (g, S);
    });
  g->stack.safe_push (T);
}

/* Return 1 if there are unvisited predecessors of T in graph G.  */

static int
elim_unvisited_predecessor (elim_graph *g, int T)
{
  int P;
  location_t locus;

  FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
    {
      if (!bitmap_bit_p (g->visited, P))
        return 1;
    });
  return 0;
}

/* Process predecessors first, and insert a copy.  */

static void
elim_backward (elim_graph *g, int T)
{
  int P;
  location_t locus;

  bitmap_set_bit (g->visited, T);
  FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
    {
      if (!bitmap_bit_p (g->visited, P))
        {
          elim_backward (g, P);
          insert_partition_copy_on_edge (g->e, P, T, locus);
        }
    });
}

/* Allocate a new pseudo register usable for storing values sitting
   in NAME (a decl or SSA name), i.e. with matching mode and attributes.  */

static rtx
get_temp_reg (tree name)
{
  tree type = TREE_TYPE (name);
  int unsignedp;
  machine_mode reg_mode = promote_ssa_mode (name, &unsignedp);
  rtx x = gen_reg_rtx (reg_mode);
  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
  return x;
}

/* Insert required copies for T in graph G.  Check for a strongly connected
   region, and create a temporary to break the cycle if one is found.  */

static void
elim_create (elim_graph *g, int T)
{
  int P, S;
  location_t locus;

  if (elim_unvisited_predecessor (g, T))
    {
      tree var = partition_to_var (g->map, T);
      rtx U = get_temp_reg (var);
      int unsignedsrcp = TYPE_UNSIGNED (TREE_TYPE (var));

      insert_part_to_rtx_on_edge (g->e, U, T, UNKNOWN_LOCATION);
      FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
        {
          if (!bitmap_bit_p (g->visited, P))
            {
              elim_backward (g, P);
              insert_rtx_to_part_on_edge (g->e, P, U, unsignedsrcp, locus);
            }
        });
    }
  else
    {
      S = elim_graph_remove_succ_edge (g, T, &locus);
      if (S != -1)
        {
          bitmap_set_bit (g->visited, T);
          insert_partition_copy_on_edge (g->e, T, S, locus);
        }
    }
}
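
/* Illustrative example of the cycle case handled above: if the copies
   required on an edge form a swap, P1 = P2 and P2 = P1, then each node
   has an unvisited predecessor, so a temporary U is allocated and the
   emitted sequence becomes U = P1; P1 = P2; P2 = U, which breaks the
   strongly connected region without losing either value.  */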

/* Eliminate all the phi nodes on edge E in graph G.  */

static void
eliminate_phi (edge e, elim_graph *g)
{
  int x;

  gcc_assert (g->const_copies.length () == 0);
  gcc_assert (g->copy_locus.length () == 0);

  /* Abnormal edges already have everything coalesced.  */
  if (e->flags & EDGE_ABNORMAL)
    return;

  g->e = e;

  eliminate_build (g);

  if (elim_graph_size (g) != 0)
    {
      int part;

      bitmap_clear (g->visited);
      g->stack.truncate (0);

      FOR_EACH_VEC_ELT (g->nodes, x, part)
        {
          if (!bitmap_bit_p (g->visited, part))
            elim_forward (g, part);
        }

      bitmap_clear (g->visited);
      while (g->stack.length () > 0)
        {
          x = g->stack.pop ();
          if (!bitmap_bit_p (g->visited, x))
            elim_create (g, x);
        }
    }

  /* If there are any pending constant copies, issue them now.  */
  while (g->const_copies.length () > 0)
    {
      int dest;
      tree src;
      location_t locus;

      src = g->const_copies.pop ();
      dest = g->const_dests.pop ();
      locus = g->copy_locus.pop ();
      insert_value_copy_on_edge (e, dest, src, locus);
    }
}
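
/* Note on ordering (a summary of the code above, not a new invariant):
   partition copies are emitted first, in an order chosen by the
   forward/backward walks so that sources are read before they are
   overwritten, and only then are the queued constant copies issued, in
   line with the comment in eliminate_build about saving constant copies
   until all other copies have been emitted on the edge.  */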

/* Remove each argument from PHI.  If an arg was the last use of an SSA_NAME,
   check to see if this allows another PHI node to be removed.  */

static void
remove_gimple_phi_args (gphi *phi)
{
  use_operand_p arg_p;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Removing Dead PHI definition: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  FOR_EACH_PHI_ARG (arg_p, phi, iter, SSA_OP_USE)
    {
      tree arg = USE_FROM_PTR (arg_p);
      if (TREE_CODE (arg) == SSA_NAME)
        {
          /* Remove the reference to the existing argument.  */
          SET_USE (arg_p, NULL_TREE);
          if (has_zero_uses (arg))
            {
              gimple *stmt;
              gimple_stmt_iterator gsi;

              stmt = SSA_NAME_DEF_STMT (arg);

              /* Also remove the def if it is a PHI node.  */
              if (gimple_code (stmt) == GIMPLE_PHI)
                {
                  remove_gimple_phi_args (as_a<gphi *> (stmt));
                  gsi = gsi_for_stmt (stmt);
                  remove_phi_node (&gsi, true);
                }
            }
        }
    }
}

/* Remove any PHI node which is a virtual PHI, or a PHI with no uses.  */

static void
eliminate_useless_phis (void)
{
  basic_block bb;
  gphi_iterator gsi;
  tree result;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
        {
          gphi *phi = gsi.phi ();
          result = gimple_phi_result (phi);
          if (virtual_operand_p (result))
            {
              /* There should be no arguments which are not virtual, or the
                 results will be incorrect.  */
              if (flag_checking)
                for (size_t i = 0; i < gimple_phi_num_args (phi); i++)
                  {
                    tree arg = PHI_ARG_DEF (phi, i);
                    if (TREE_CODE (arg) == SSA_NAME
                        && !virtual_operand_p (arg))
                      {
                        fprintf (stderr, "Argument of PHI is not virtual (");
                        print_generic_expr (stderr, arg, TDF_SLIM);
                        fprintf (stderr, "), but the result is :");
                        print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
                        internal_error ("SSA corruption");
                      }
                  }

              remove_phi_node (&gsi, true);
            }
          else
            {
              /* Also remove real PHIs with no uses.  */
              if (has_zero_uses (result))
                {
                  remove_gimple_phi_args (phi);
                  remove_phi_node (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
        }
    }
}

/* This function will rewrite the current program using the variable mapping
   found in MAP.  If the replacement vector VALUES is provided, any
   occurrences of partitions with non-null entries in the vector will be
   replaced with the expression in the vector instead of its mapped
   variable.  */

static void
rewrite_trees (var_map map)
{
  if (!flag_checking)
    return;

  basic_block bb;
  /* Search for PHIs where the destination has no partition, but one
     or more arguments has a partition.  This should not happen and can
     create incorrect code.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          tree T0 = var_to_partition_to_var (map, gimple_phi_result (phi));
          if (T0 == NULL_TREE)
            {
              size_t i;
              for (i = 0; i < gimple_phi_num_args (phi); i++)
                {
                  tree arg = PHI_ARG_DEF (phi, i);

                  if (TREE_CODE (arg) == SSA_NAME
                      && var_to_partition (map, arg) != NO_PARTITION)
                    {
                      fprintf (stderr, "Argument of PHI is in a partition :(");
                      print_generic_expr (stderr, arg, TDF_SLIM);
                      fprintf (stderr, "), but the result is not :");
                      print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
                      internal_error ("SSA corruption");
                    }
                }
            }
        }
    }
}

/* Create a default def for VAR.  */

static void
create_default_def (tree var, void *arg ATTRIBUTE_UNUSED)
{
  if (!is_gimple_reg (var))
    return;

  tree ssa = get_or_create_ssa_default_def (cfun, var);
  gcc_assert (ssa);
}

/* Call CALLBACK for all PARM_DECLs and RESULT_DECLs for which
   assign_parms may ask for a default partition.  */

static void
for_all_parms (void (*callback)(tree var, void *arg), void *arg)
{
  for (tree var = DECL_ARGUMENTS (current_function_decl); var;
       var = DECL_CHAIN (var))
    callback (var, arg);
  if (!VOID_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    callback (DECL_RESULT (current_function_decl), arg);
  if (cfun->static_chain_decl)
    callback (cfun->static_chain_decl, arg);
}

/* We need to pass two arguments to set_parm_default_def_partition,
   but for_all_parms only supports one.  Use a pair.  */

typedef std::pair<var_map, bitmap> parm_default_def_partition_arg;

/* Set in ARG's PARTS bitmap the bit corresponding to the partition in
   ARG's MAP containing VAR's default def.  */

static void
set_parm_default_def_partition (tree var, void *arg_)
{
  parm_default_def_partition_arg *arg = (parm_default_def_partition_arg *)arg_;
  var_map map = arg->first;
  bitmap parts = arg->second;

  if (!is_gimple_reg (var))
    return;

  tree ssa = ssa_default_def (cfun, var);
  gcc_assert (ssa);

  int version = var_to_partition (map, ssa);
  gcc_assert (version != NO_PARTITION);

  bool changed = bitmap_set_bit (parts, version);
  gcc_assert (changed);
}

/* Allocate and return a bitmap that has a bit set for each partition
   that contains a default def for a parameter.  */

bitmap
get_parm_default_def_partitions (var_map map)
{
  bitmap parm_default_def_parts = BITMAP_ALLOC (NULL);

  parm_default_def_partition_arg
    arg = std::make_pair (map, parm_default_def_parts);

  for_all_parms (set_parm_default_def_partition, &arg);

  return parm_default_def_parts;
}

/* Allocate and return a bitmap that has a bit set for each partition
   that contains an undefined value.  */

bitmap
get_undefined_value_partitions (var_map map)
{
  bitmap undefined_value_parts = BITMAP_ALLOC (NULL);

  for (unsigned int i = 1; i < num_ssa_names; i++)
    {
      tree var = ssa_name (i);
      if (var
          && !virtual_operand_p (var)
          && !has_zero_uses (var)
          && ssa_undefined_value_p (var))
        {
          const int p = var_to_partition (map, var);
          if (p != NO_PARTITION)
            bitmap_set_bit (undefined_value_parts, p);
        }
    }

  return undefined_value_parts;
}

/* Given the out-of-ssa info object SA (with prepared partitions)
   eliminate all phi nodes in all basic blocks.  Afterwards no
   basic block will have phi nodes anymore and there are possibly
   some RTL instructions inserted on edges.  */

void
expand_phi_nodes (struct ssaexpand *sa)
{
  basic_block bb;
  elim_graph g (sa->map);

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb,
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    if (!gimple_seq_empty_p (phi_nodes (bb)))
      {
        edge e;
        edge_iterator ei;
        FOR_EACH_EDGE (e, ei, bb->preds)
          eliminate_phi (e, &g);
        set_phi_nodes (bb, NULL);
        /* We can't redirect EH edges in RTL land, so we need to do this
           here.  Redirection happens only when splitting is necessary,
           which it is only for critical edges, normally.  For EH edges
           it might also be necessary when the successor has more than
           one predecessor.  In that case the edge is either required to
           be fallthru (which EH edges aren't), or the predecessor needs
           to end with a jump (which again, isn't the case with EH edges).
           Hence, split all EH edges on which we inserted instructions
           and whose successor has multiple predecessors.  */
        for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
          {
            if (e->insns.r && (e->flags & EDGE_EH)
                && !single_pred_p (e->dest))
              {
                rtx_insn *insns = e->insns.r;
                basic_block bb;
                e->insns.r = NULL;
                bb = split_edge (e);
                single_pred_edge (bb)->insns.r = insns;
              }
            else
              ei_next (&ei);
          }
      }
}

/* Remove the ssa-names in the current function and translate them into normal
   compiler variables.  PERFORM_TER is true if Temporary Expression Replacement
   should also be used.  */

static void
remove_ssa_form (bool perform_ter, struct ssaexpand *sa)
{
  bitmap values = NULL;
  var_map map;

  for_all_parms (create_default_def, NULL);
  map = init_var_map (num_ssa_names);
  coalesce_ssa_name (map);

  /* Return to viewing the variable list as just all reference variables after
     coalescing has been performed.  */
  partition_view_normal (map);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "After Coalescing:\n");
      dump_var_map (dump_file, map);
    }

  if (perform_ter)
    {
      values = find_replaceable_exprs (map);
      if (values && dump_file && (dump_flags & TDF_DETAILS))
        dump_replaceable_exprs (dump_file, values);
    }

  rewrite_trees (map);

  sa->map = map;
  sa->values = values;
  sa->partitions_for_parm_default_defs = get_parm_default_def_partitions (map);
  sa->partitions_for_undefined_values = get_undefined_value_partitions (map);
}

/* If not already done so for basic block BB, assign increasing uids
   to each of its instructions.  */

static void
maybe_renumber_stmts_bb (basic_block bb)
{
  unsigned i = 0;
  gimple_stmt_iterator gsi;

  if (!bb->aux)
    return;
  bb->aux = NULL;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      gimple_set_uid (stmt, i);
      i++;
    }
}

/* Return true if we can determine that the SSA_NAMEs RESULT (a result
   of a PHI node) and ARG (one of its arguments) conflict.  Return false
   otherwise, also when we simply aren't sure.  */

static bool
trivially_conflicts_p (basic_block bb, tree result, tree arg)
{
  use_operand_p use;
  imm_use_iterator imm_iter;
  gimple *defa = SSA_NAME_DEF_STMT (arg);

  /* If ARG isn't defined in the same block it's too complicated for
     our little mind.  */
  if (gimple_bb (defa) != bb)
    return false;

  FOR_EACH_IMM_USE_FAST (use, imm_iter, result)
    {
      gimple *use_stmt = USE_STMT (use);
      if (is_gimple_debug (use_stmt))
        continue;
      /* Now, if there's a use of RESULT that lies outside this basic block,
         then there surely is a conflict with ARG.  */
      if (gimple_bb (use_stmt) != bb)
        return true;
      if (gimple_code (use_stmt) == GIMPLE_PHI)
        continue;
      /* The use now is in a real stmt of BB, so if ARG was defined
         in a PHI node (like RESULT) both conflict.  */
      if (gimple_code (defa) == GIMPLE_PHI)
        return true;
      maybe_renumber_stmts_bb (bb);
      /* If the use of RESULT occurs after the definition of ARG,
         the two conflict too.  */
      if (gimple_uid (defa) < gimple_uid (use_stmt))
        return true;
    }

  return false;
}
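
/* Example (illustrative only), with BB being the single block of a loop:

     <bb>:
       result_1 = PHI <..., arg_2 (backedge)>
       ...
       arg_2 = result_1 + 1;
       use (result_1);

   The use of result_1 after the definition of arg_2 (or any use of
   result_1 outside BB) means the two live ranges overlap, so the
   function returns true.  */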

/* Search every PHI node for arguments associated with backedges which
   we can trivially determine will need a copy (the argument is either
   not an SSA_NAME or the argument has a different underlying variable
   than the PHI result).

   Insert a copy from the PHI argument to a new destination at the
   end of the block with the backedge to the top of the loop.  Update
   the PHI argument to reference this new destination.  */
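
/* For instance (illustrative only), given a loop header containing

     x_1 = PHI <x_0 (preheader), 7 (latch)>

   where the latch edge is a critical backedge, a new name x_2 is created,
   the copy x_2 = 7 is inserted at the end of the latch block, and the PHI
   becomes

     x_1 = PHI <x_0 (preheader), x_2 (latch)>

   so that out-of-SSA no longer has to split the backedge just to place
   the constant copy.  */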

static void
insert_backedge_copies (void)
{
  basic_block bb;
  gphi_iterator gsi;

  mark_dfs_back_edges ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Mark block as possibly needing calculation of UIDs.  */
      bb->aux = &bb->aux;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          tree result = gimple_phi_result (phi);
          size_t i;

          if (virtual_operand_p (result))
            continue;

          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree arg = gimple_phi_arg_def (phi, i);
              edge e = gimple_phi_arg_edge (phi, i);

              /* We are only interested in copies emitted on critical
                 backedges.  */
              if (!(e->flags & EDGE_DFS_BACK)
                  || !EDGE_CRITICAL_P (e))
                continue;

              /* If the argument is not an SSA_NAME, then we will need a
                 constant initialization.  If the argument is an SSA_NAME then
                 a copy statement may be needed.  First handle the case
                 where we cannot insert before the argument definition.  */
              if (TREE_CODE (arg) != SSA_NAME
                  || (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_PHI
                      && trivially_conflicts_p (bb, result, arg)))
                {
                  tree name;
                  gimple *stmt;
                  gimple *last = NULL;
                  gimple_stmt_iterator gsi2;

                  gsi2 = gsi_last_bb (gimple_phi_arg_edge (phi, i)->src);
                  if (!gsi_end_p (gsi2))
                    last = gsi_stmt (gsi2);

                  /* In theory the only way we ought to get back to the
                     start of a loop should be with a COND_EXPR or GOTO_EXPR.
                     However, better safe than sorry.
                     If the block ends with a control statement or
                     something that might throw, then we have to
                     insert this assignment before the last
                     statement.  Else insert it after the last statement.  */
                  if (last && stmt_ends_bb_p (last))
                    {
                      /* If the last statement in the block is the definition
                         site of the PHI argument, then we can't insert
                         anything after it.  */
                      if (TREE_CODE (arg) == SSA_NAME
                          && SSA_NAME_DEF_STMT (arg) == last)
                        continue;
                    }

                  /* Create a new instance of the underlying variable of the
                     PHI result.  */
                  name = copy_ssa_name (result);
                  stmt = gimple_build_assign (name,
                                              gimple_phi_arg_def (phi, i));

                  /* Copy location if present.  */
                  if (gimple_phi_arg_has_location (phi, i))
                    gimple_set_location (stmt,
                                         gimple_phi_arg_location (phi, i));

                  /* Insert the new statement into the block and update
                     the PHI node.  */
                  if (last && stmt_ends_bb_p (last))
                    gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
                  else
                    gsi_insert_after (&gsi2, stmt, GSI_NEW_STMT);
                  SET_PHI_ARG_DEF (phi, i, name);
                }
              /* Insert a copy before the definition of the backedge value
                 and adjust all conflicting uses.  */
              else if (trivially_conflicts_p (bb, result, arg))
                {
                  gimple *def = SSA_NAME_DEF_STMT (arg);
                  if (gimple_nop_p (def)
                      || gimple_code (def) == GIMPLE_PHI)
                    continue;

                  tree name = copy_ssa_name (result);
                  gimple *stmt = gimple_build_assign (name, result);
                  imm_use_iterator imm_iter;
                  gimple *use_stmt;

                  /* The following matches trivially_conflicts_p.  */
                  FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, result)
                    {
                      if (gimple_bb (use_stmt) != bb
                          || (gimple_code (use_stmt) != GIMPLE_PHI
                              && (maybe_renumber_stmts_bb (bb), true)
                              && gimple_uid (use_stmt) > gimple_uid (def)))
                        {
                          use_operand_p use;
                          FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
                            SET_USE (use, name);
                        }
                    }

                  gimple_stmt_iterator gsi = gsi_for_stmt (def);
                  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
                }
            }
        }

      /* Unmark this block again.  */
      bb->aux = NULL;
    }
}

/* Free all memory associated with going out of SSA form.  SA is
   the outof-SSA info object.  */

void
finish_out_of_ssa (struct ssaexpand *sa)
{
  free (sa->partition_to_pseudo);
  if (sa->values)
    BITMAP_FREE (sa->values);
  delete_var_map (sa->map);
  BITMAP_FREE (sa->partitions_for_parm_default_defs);
  BITMAP_FREE (sa->partitions_for_undefined_values);
  memset (sa, 0, sizeof *sa);
}

/* Take the current function out of SSA form, translating PHIs as described in
   R. Morgan, ``Building an Optimizing Compiler'',
   Butterworth-Heinemann, Boston, MA, 1998.  pp 176-186.  */

void
rewrite_out_of_ssa (struct ssaexpand *sa)
{
  /* If elimination of a PHI requires inserting a copy on a backedge,
     then we will have to split the backedge which has numerous
     undesirable performance effects.

     A significant number of such cases can be handled here by inserting
     copies into the loop itself.  */
  insert_backedge_copies ();

  /* Eliminate PHIs which are of no use, such as virtual or dead phis.  */
  eliminate_useless_phis ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);

  remove_ssa_form (flag_tree_ter, sa);

  if (dump_file && (dump_flags & TDF_DETAILS))
    gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);
}