/* Convert a program in SSA form into Normal form.
   Copyright (C) 2004-2021 Free Software Foundation, Inc.
   Contributed by Andrew Macleod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "tree-ssa-live.h"
#include "tree-ssa-ter.h"
#include "tree-ssa-coalesce.h"
#include "tree-outof-ssa.h"
/* FIXME: A lot of code here deals with expanding to RTL.  All that code
   should be in cfgexpand.c.  */
/* Return TRUE if expression STMT is suitable for replacement.  */

bool
ssa_is_replaceable_p (gimple *stmt)
{
  use_operand_p use_p;
  tree def;
  gimple *use_stmt;

  /* Only consider modify stmts.  */
  if (!is_gimple_assign (stmt))
    return false;

  /* If the statement may throw an exception, it cannot be replaced.  */
  if (stmt_could_throw_p (cfun, stmt))
    return false;

  /* Punt if there is more than 1 def.  */
  def = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_DEF);
  if (!def)
    return false;

  /* Only consider definitions which have a single use.  */
  if (!single_imm_use (def, &use_p, &use_stmt))
    return false;

  /* Used in this block, but at the TOP of the block, not the end.  */
  if (gimple_code (use_stmt) == GIMPLE_PHI)
    return false;

  /* There must be no VDEFs.  */
  if (gimple_vdef (stmt))
    return false;

  /* Float expressions must go through memory if float-store is on.  */
  if (flag_float_store
      && FLOAT_TYPE_P (TREE_TYPE (def)))
    return false;

  /* An assignment with a register variable on the RHS is not
     replaceable.  */
  if (gimple_assign_rhs_code (stmt) == VAR_DECL
      && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt)))
    return false;

  /* No function calls can be replaced.  */
  if (is_gimple_call (stmt))
    return false;

  /* Leave any stmt with volatile operands alone as well.  */
  if (gimple_has_volatile_ops (stmt))
    return false;

  return true;
}
/* Used to hold all the components required to do SSA PHI elimination.
   The node and pred/succ list is a simple linear list of nodes and
   edges represented as pairs of nodes.

   The predecessor and successor list:  Nodes are entered in pairs, where
   [0] ->PRED, [1]->SUCC.  All the even indexes in the array represent
   predecessors, all the odd elements are successors.

   Rationale:
   When this was implemented with bitmaps, the SSA->Normal conversion time
   of very large programs was dominated by clearing the interference graph.

   Typically this list of edges is extremely small since it only includes
   PHI results and uses from a single edge which have not coalesced with
   each other.  This means that no virtual PHI nodes are included, and
   empirical evidence suggests that the number of edges rarely exceeds
   3, and in a bootstrap of GCC, the maximum size encountered was 7.
   This also limits the number of possible nodes that are involved to
   rarely more than 6, and in the bootstrap of GCC, the maximum number
   of nodes encountered was 12.  */
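
/* For instance, the copy PART.3 = PART.5 arising from a PHI whose result
   is in partition 3 and whose argument on the edge is in partition 5 is
   recorded as edge_list[2i] == 3 (the predecessor) and
   edge_list[2i + 1] == 5 (the successor), with its source location kept
   in edge_locus[i].  */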
class elim_graph
{
public:
  elim_graph (var_map map);

  /* Size of the elimination vectors.  */
  int size;

  /* List of nodes in the elimination graph.  */
  auto_vec<int> nodes;

  /* The predecessor and successor edge list.  */
  auto_vec<int> edge_list;

  /* Source locus on each edge.  */
  auto_vec<location_t> edge_locus;

  /* Visited vector.  */
  auto_sbitmap visited;

  /* Stack for visited nodes.  */
  auto_vec<int> stack;

  /* The variable partition map.  */
  var_map map;

  /* Edge being eliminated by this graph.  */
  edge e;

  /* List of constant copies to emit.  These are pushed on in pairs.  */
  auto_vec<int> const_dests;
  auto_vec<tree> const_copies;

  /* Source locations for any constant copies.  */
  auto_vec<location_t> copy_locus;
};
/* For an edge E find out a good source location to associate with
   instructions inserted on edge E.  If E has an implicit goto set,
   use its location.  Otherwise search instructions in predecessors
   of E for a location, and use that one.  That makes sense because
   we insert on edges for PHI nodes, and effects of PHIs happen on
   the end of the predecessor conceptually.  An exception is made
   for EH edges because we don't want to drag the source location
   of unrelated statements at the beginning of handlers; they would
   be further reused for various EH constructs, which would damage
   the coverage information.  */

static void
set_location_for_edge (edge e)
{
  if (e->goto_locus)
    set_curr_insn_location (e->goto_locus);
  else if (e->flags & EDGE_EH)
    {
      basic_block bb = e->dest;
      gimple_stmt_iterator gsi;

      do
        {
          for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
            {
              gimple *stmt = gsi_stmt (gsi);
              if (is_gimple_debug (stmt))
                continue;
              if (gimple_has_location (stmt) || gimple_block (stmt))
                {
                  set_curr_insn_location (gimple_location (stmt));
                  return;
                }
            }
          /* Nothing found in this basic block.  Make a half-assed attempt
             to continue with another block.  */
          if (single_succ_p (bb))
            bb = single_succ (bb);
          else
            bb = e->dest;
        }
      while (bb != e->dest);
    }
  else
    {
      basic_block bb = e->src;
      gimple_stmt_iterator gsi;

      do
        {
          for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
            {
              gimple *stmt = gsi_stmt (gsi);
              if (is_gimple_debug (stmt))
                continue;
              if (gimple_has_location (stmt) || gimple_block (stmt))
                {
                  set_curr_insn_location (gimple_location (stmt));
                  return;
                }
            }
          /* Nothing found in this basic block.  Make a half-assed attempt
             to continue with another block.  */
          if (single_pred_p (bb))
            bb = single_pred (bb);
          else
            bb = e->src;
        }
      while (bb != e->src);
    }
}
/* Emit insns to copy SRC into DEST converting SRC if necessary.  As
   SRC/DEST might be BLKmode memory locations SIZEEXP is a tree from
   which we deduce the size to copy in that case.  */

static inline rtx_insn *
emit_partition_copy (rtx dest, rtx src, int unsignedsrcp, tree sizeexp)
{
  start_sequence ();

  if (GET_MODE (src) != VOIDmode && GET_MODE (src) != GET_MODE (dest))
    src = convert_to_mode (GET_MODE (dest), src, unsignedsrcp);
  if (GET_MODE (src) == BLKmode)
    {
      gcc_assert (GET_MODE (dest) == BLKmode);
      emit_block_move (dest, src, expr_size (sizeexp), BLOCK_OP_NORMAL);
    }
  else
    emit_move_insn (dest, src);
  do_pending_stack_adjust ();

  rtx_insn *seq = get_insns ();
  end_sequence ();

  return seq;
}
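
/* Note that the copy is generated inside its own start_sequence/end_sequence
   pair, so callers get back a detached insn sequence which they then queue
   with insert_insn_on_edge rather than emitting into the current stream.  */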
/* Insert a copy instruction from partition SRC to DEST onto edge E.  */

static void
insert_partition_copy_on_edge (edge e, int dest, int src, location_t locus)
{
  tree var;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file,
               "Inserting a partition copy on edge BB%d->BB%d : "
               "PART.%d = PART.%d",
               e->src->index,
               e->dest->index, dest, src);
      fprintf (dump_file, "\n");
    }

  gcc_assert (SA.partition_to_pseudo[dest]);
  gcc_assert (SA.partition_to_pseudo[src]);

  set_location_for_edge (e);
  /* If a locus is provided, override the default.  */
  if (locus)
    set_curr_insn_location (locus);

  var = partition_to_var (SA.map, src);
  rtx_insn *seq = emit_partition_copy (copy_rtx (SA.partition_to_pseudo[dest]),
                                       copy_rtx (SA.partition_to_pseudo[src]),
                                       TYPE_UNSIGNED (TREE_TYPE (var)),
                                       var);

  insert_insn_on_edge (seq, e);
}
/* Insert a copy instruction from expression SRC to partition DEST
   onto edge E.  */

static void
insert_value_copy_on_edge (edge e, int dest, tree src, location_t locus)
{
  rtx dest_rtx, seq, x;
  machine_mode dest_mode, src_mode;
  int unsignedp;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file,
               "Inserting a value copy on edge BB%d->BB%d : PART.%d = ",
               e->src->index,
               e->dest->index, dest);
      print_generic_expr (dump_file, src, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  dest_rtx = copy_rtx (SA.partition_to_pseudo[dest]);
  gcc_assert (dest_rtx);

  set_location_for_edge (e);
  /* If a locus is provided, override the default.  */
  if (locus)
    set_curr_insn_location (locus);

  start_sequence ();

  tree name = partition_to_var (SA.map, dest);
  src_mode = TYPE_MODE (TREE_TYPE (src));
  dest_mode = GET_MODE (dest_rtx);
  gcc_assert (src_mode == TYPE_MODE (TREE_TYPE (name)));
  gcc_assert (!REG_P (dest_rtx)
              || dest_mode == promote_ssa_mode (name, &unsignedp));

  if (src_mode != dest_mode)
    {
      x = expand_expr (src, NULL, src_mode, EXPAND_NORMAL);
      x = convert_modes (dest_mode, src_mode, x, unsignedp);
    }
  else if (src_mode == BLKmode)
    {
      x = dest_rtx;
      store_expr (src, x, 0, false, false);
    }
  else
    x = expand_expr (src, dest_rtx, dest_mode, EXPAND_NORMAL);

  if (x != dest_rtx)
    emit_move_insn (dest_rtx, x);
  do_pending_stack_adjust ();

  seq = get_insns ();
  end_sequence ();

  insert_insn_on_edge (seq, e);
}
/* Insert a copy instruction from RTL expression SRC to partition DEST
   onto edge E.  */

static void
insert_rtx_to_part_on_edge (edge e, int dest, rtx src, int unsignedsrcp,
                            location_t locus)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file,
               "Inserting a temp copy on edge BB%d->BB%d : PART.%d = ",
               e->src->index,
               e->dest->index, dest);
      print_simple_rtl (dump_file, src);
      fprintf (dump_file, "\n");
    }

  gcc_assert (SA.partition_to_pseudo[dest]);

  set_location_for_edge (e);
  /* If a locus is provided, override the default.  */
  if (locus)
    set_curr_insn_location (locus);

  /* We give the destination as sizeexp in case src/dest are BLKmode
     mems.  Usually we give the source.  As these result from SSA names
     the left and right size should be the same (and no WITH_SIZE_EXPR
     involved), so it doesn't matter.  */
  rtx_insn *seq = emit_partition_copy (copy_rtx (SA.partition_to_pseudo[dest]),
                                       src, unsignedsrcp,
                                       partition_to_var (SA.map, dest));

  insert_insn_on_edge (seq, e);
}
/* Insert a copy instruction from partition SRC to RTL lvalue DEST
   onto edge E.  */

static void
insert_part_to_rtx_on_edge (edge e, rtx dest, int src, location_t locus)
{
  tree var;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file,
               "Inserting a temp copy on edge BB%d->BB%d : ",
               e->src->index, e->dest->index);
      print_simple_rtl (dump_file, dest);
      fprintf (dump_file, "= PART.%d\n", src);
    }

  gcc_assert (SA.partition_to_pseudo[src]);

  set_location_for_edge (e);
  /* If a locus is provided, override the default.  */
  if (locus)
    set_curr_insn_location (locus);

  var = partition_to_var (SA.map, src);
  rtx_insn *seq = emit_partition_copy (dest,
                                       copy_rtx (SA.partition_to_pseudo[src]),
                                       TYPE_UNSIGNED (TREE_TYPE (var)),
                                       var);

  insert_insn_on_edge (seq, e);
}
/* Create an elimination graph for map.  */

elim_graph::elim_graph (var_map map) :
  nodes (30), edge_list (20), edge_locus (10), visited (map->num_partitions),
  stack (30), map (map), const_dests (20), const_copies (20), copy_locus (10)
{
}
/* Empty elimination graph G.  */

static inline void
clear_elim_graph (elim_graph *g)
{
  g->nodes.truncate (0);
  g->edge_list.truncate (0);
  g->edge_locus.truncate (0);
}
/* Return the number of nodes in graph G.  */

static inline int
elim_graph_size (elim_graph *g)
{
  return g->nodes.length ();
}
/* Add NODE to graph G, if it doesn't exist already.  */

static inline void
elim_graph_add_node (elim_graph *g, int node)
{
  int x;
  int t;

  FOR_EACH_VEC_ELT (g->nodes, x, t)
    if (t == node)
      return;
  g->nodes.safe_push (node);
}
/* Add the edge PRED->SUCC to graph G.  */

static inline void
elim_graph_add_edge (elim_graph *g, int pred, int succ, location_t locus)
{
  g->edge_list.safe_push (pred);
  g->edge_list.safe_push (succ);
  g->edge_locus.safe_push (locus);
}
/* Remove an edge from graph G for which NODE is the predecessor, and
   return the successor node.  -1 is returned if there is no such edge.  */

static inline int
elim_graph_remove_succ_edge (elim_graph *g, int node, location_t *locus)
{
  int y;
  unsigned x;
  for (x = 0; x < g->edge_list.length (); x += 2)
    if (g->edge_list[x] == node)
      {
        g->edge_list[x] = -1;
        y = g->edge_list[x + 1];
        g->edge_list[x + 1] = -1;
        *locus = g->edge_locus[x / 2];
        g->edge_locus[x / 2] = UNKNOWN_LOCATION;
        return y;
      }
  *locus = UNKNOWN_LOCATION;
  return -1;
}
/* Find all the nodes in GRAPH which are successors to NODE in the
   edge list.  VAR will hold the partition number found.  CODE is the
   code fragment executed for every node found.  */

#define FOR_EACH_ELIM_GRAPH_SUCC(GRAPH, NODE, VAR, LOCUS, CODE)	\
do {								\
  unsigned x_;							\
  int y_;							\
  for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2)	\
    {								\
      y_ = (GRAPH)->edge_list[x_];				\
      if (y_ != (NODE))						\
        continue;						\
      (void) ((VAR) = (GRAPH)->edge_list[x_ + 1]);		\
      (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]);		\
      CODE;							\
    }								\
} while (0)

/* Find all the nodes which are predecessors of NODE in the edge list for
   GRAPH.  VAR will hold the partition number found.  CODE is the
   code fragment executed for every node found.  */

#define FOR_EACH_ELIM_GRAPH_PRED(GRAPH, NODE, VAR, LOCUS, CODE)	\
do {								\
  unsigned x_;							\
  int y_;							\
  for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2)	\
    {								\
      y_ = (GRAPH)->edge_list[x_ + 1];				\
      if (y_ != (NODE))						\
        continue;						\
      (void) ((VAR) = (GRAPH)->edge_list[x_]);			\
      (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]);		\
      CODE;							\
    }								\
} while (0)
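
/* Note for both walkers above: elim_graph_remove_succ_edge marks a removed
   edge by storing -1 in both slots of its pair.  Partition numbers are never
   negative, so such entries fail the y_ != (NODE) test and are skipped.  */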
/* Add T to elimination graph G.  */

static inline void
eliminate_name (elim_graph *g, int T)
{
  elim_graph_add_node (g, T);
}
/* Return true if this phi argument T should have a copy queued when using
   var_map MAP.  PHI nodes should contain only ssa_names and invariants.  A
   test for ssa_name is definitely simpler, but don't let invalid contents
   slip through in the meantime.  */

static inline bool
queue_phi_copy_p (var_map map, tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (var_to_partition (map, t) == NO_PARTITION)
        return true;
      return false;
    }
  gcc_checking_assert (is_gimple_min_invariant (t));
  return true;
}
/* Build elimination graph G for basic block BB on incoming PHI edge
   G->e.  */

static void
eliminate_build (elim_graph *g)
{
  tree Ti;
  int p0, pi;
  gphi_iterator gsi;

  clear_elim_graph (g);

  for (gsi = gsi_start_phis (g->e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      location_t locus;

      p0 = var_to_partition (g->map, gimple_phi_result (phi));
      /* Ignore results which are not in partitions.  */
      if (p0 == NO_PARTITION)
        continue;

      Ti = PHI_ARG_DEF (phi, g->e->dest_idx);
      /* See set_location_for_edge for the rationale.  */
      if (g->e->flags & EDGE_EH)
        locus = UNKNOWN_LOCATION;
      else
        locus = gimple_phi_arg_location_from_edge (phi, g->e);

      /* If this argument is a constant, or a SSA_NAME which is being
         left in SSA form, just queue a copy to be emitted on this
         edge.  */
      if (queue_phi_copy_p (g->map, Ti))
        {
          /* Save constant copies until all other copies have been emitted
             on this edge.  */
          g->const_dests.safe_push (p0);
          g->const_copies.safe_push (Ti);
          g->copy_locus.safe_push (locus);
        }
      else
        {
          pi = var_to_partition (g->map, Ti);
          if (p0 != pi)
            {
              eliminate_name (g, p0);
              eliminate_name (g, pi);
              elim_graph_add_edge (g, p0, pi, locus);
            }
        }
    }
}
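
/* As an example, a PHI such as a_2 = PHI <b_3(E), ...> whose result lives
   in partition P0 and whose argument on edge E lives in a different
   partition P5 adds nodes P0 and P5 and the edge (P0, P5) to the graph,
   whereas an argument that is a constant (or an SSA name with no partition)
   is only queued on const_dests/const_copies and emitted after all the
   partition copies.  */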
/* Push successors of T onto the elimination stack for G.  */

static void
elim_forward (elim_graph *g, int T)
{
  int S;
  location_t locus;

  bitmap_set_bit (g->visited, T);
  FOR_EACH_ELIM_GRAPH_SUCC (g, T, S, locus,
    {
      if (!bitmap_bit_p (g->visited, S))
        elim_forward (g, S);
    });
  g->stack.safe_push (T);
}
/* Return 1 if there are unvisited predecessors of T in graph G.  */

static int
elim_unvisited_predecessor (elim_graph *g, int T)
{
  int P;
  location_t locus;

  FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
    {
      if (!bitmap_bit_p (g->visited, P))
        return 1;
    });
  return 0;
}
/* Process predecessors first, and insert a copy.  */

static void
elim_backward (elim_graph *g, int T)
{
  int P;
  location_t locus;

  bitmap_set_bit (g->visited, T);
  FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
    {
      if (!bitmap_bit_p (g->visited, P))
        {
          elim_backward (g, P);
          insert_partition_copy_on_edge (g->e, P, T, locus);
        }
    });
}
/* Allocate a new pseudo register usable for storing values sitting
   in NAME (a decl or SSA name), i.e. with matching mode and attributes.  */

static rtx
get_temp_reg (tree name)
{
  tree type = TREE_TYPE (name);
  int unsignedp;
  machine_mode reg_mode = promote_ssa_mode (name, &unsignedp);
  if (reg_mode == BLKmode)
    return assign_temp (type, 0, 0);
  rtx x = gen_reg_rtx (reg_mode);
  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
  return x;
}
/* Insert required copies for T in graph G.  Check for a strongly connected
   region, and create a temporary to break the cycle if one is found.  */

static void
elim_create (elim_graph *g, int T)
{
  int P, S;
  location_t locus;

  if (elim_unvisited_predecessor (g, T))
    {
      tree var = partition_to_var (g->map, T);
      rtx U = get_temp_reg (var);
      int unsignedsrcp = TYPE_UNSIGNED (TREE_TYPE (var));

      insert_part_to_rtx_on_edge (g->e, U, T, UNKNOWN_LOCATION);
      FOR_EACH_ELIM_GRAPH_PRED (g, T, P, locus,
        {
          if (!bitmap_bit_p (g->visited, P))
            {
              elim_backward (g, P);
              insert_rtx_to_part_on_edge (g->e, P, U, unsignedsrcp, locus);
            }
        });
    }
  else
    {
      S = elim_graph_remove_succ_edge (g, T, &locus);
      if (S != -1)
        {
          bitmap_set_bit (g->visited, T);
          insert_partition_copy_on_edge (g->e, T, S, locus);
        }
    }
}
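
/* The temporary above handles the classic parallel-copy swap: if the PHIs
   on this edge require both PART.1 = PART.2 and PART.2 = PART.1, one value
   would be clobbered before it is read.  Emitting U = PART.1, then
   PART.1 = PART.2, then PART.2 = U preserves both values.  */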
/* Eliminate all the phi nodes on edge E in graph G.  */

static void
eliminate_phi (edge e, elim_graph *g)
{
  int x;

  gcc_assert (g->const_copies.length () == 0);
  gcc_assert (g->copy_locus.length () == 0);

  /* Abnormal edges already have everything coalesced.  */
  if (e->flags & EDGE_ABNORMAL)
    return;

  g->e = e;

  eliminate_build (g);

  if (elim_graph_size (g) != 0)
    {
      int part;

      bitmap_clear (g->visited);
      g->stack.truncate (0);

      FOR_EACH_VEC_ELT (g->nodes, x, part)
        {
          if (!bitmap_bit_p (g->visited, part))
            elim_forward (g, part);
        }

      bitmap_clear (g->visited);
      while (g->stack.length () > 0)
        {
          x = g->stack.pop ();
          if (!bitmap_bit_p (g->visited, x))
            elim_create (g, x);
        }
    }

  /* If there are any pending constant copies, issue them now.  */
  while (g->const_copies.length () > 0)
    {
      int dest;
      tree src;
      location_t locus;

      src = g->const_copies.pop ();
      dest = g->const_dests.pop ();
      locus = g->copy_locus.pop ();
      insert_value_copy_on_edge (e, dest, src, locus);
    }
}
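
/* The two-phase walk above first visits the nodes depth-first with
   elim_forward, stacking each one after its successors, and then pops the
   stack and lets elim_create emit the copies so that each partition is read
   before it is overwritten; only a genuine cycle falls back to the
   temporary created in elim_create.  Constant copies and copies from
   uncoalesced SSA names are deliberately emitted last.  */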
/* Remove each argument from PHI.  If an arg was the last use of an SSA_NAME,
   check to see if this allows another PHI node to be removed.  */

static void
remove_gimple_phi_args (gphi *phi)
{
  use_operand_p arg_p;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Removing Dead PHI definition: ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
    }

  FOR_EACH_PHI_ARG (arg_p, phi, iter, SSA_OP_USE)
    {
      tree arg = USE_FROM_PTR (arg_p);
      if (TREE_CODE (arg) == SSA_NAME)
        {
          /* Remove the reference to the existing argument.  */
          SET_USE (arg_p, NULL_TREE);
          if (has_zero_uses (arg))
            {
              gimple *stmt;
              gimple_stmt_iterator gsi;

              stmt = SSA_NAME_DEF_STMT (arg);

              /* Also remove the def if it is a PHI node.  */
              if (gimple_code (stmt) == GIMPLE_PHI)
                {
                  remove_gimple_phi_args (as_a <gphi *> (stmt));
                  gsi = gsi_for_stmt (stmt);
                  remove_phi_node (&gsi, true);
                }
            }
        }
    }
}
/* Remove any PHI node which is a virtual PHI, or a PHI with no uses.  */

static void
eliminate_useless_phis (void)
{
  basic_block bb;
  gphi_iterator gsi;
  tree result;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
        {
          gphi *phi = gsi.phi ();
          result = gimple_phi_result (phi);
          if (virtual_operand_p (result))
            remove_phi_node (&gsi, true);
          else
            {
              /* Also remove real PHIs with no uses.  */
              if (has_zero_uses (result))
                {
                  remove_gimple_phi_args (phi);
                  remove_phi_node (&gsi, true);
                }
              else
                gsi_next (&gsi);
            }
        }
    }
}
/* This function will rewrite the current program using the variable mapping
   found in MAP.  If the replacement vector VALUES is provided, any
   occurrences of partitions with non-null entries in the vector will be
   replaced with the expression in the vector instead of its mapped
   variable.  */

static void
rewrite_trees (var_map map)
{
  if (!flag_checking)
    return;

  basic_block bb;
  /* Search for PHIs where the destination has no partition, but one
     or more arguments has a partition.  This should not happen and can
     create incorrect code.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          tree T0 = var_to_partition_to_var (map, gimple_phi_result (phi));
          if (T0 == NULL_TREE)
            {
              size_t i;
              for (i = 0; i < gimple_phi_num_args (phi); i++)
                {
                  tree arg = PHI_ARG_DEF (phi, i);

                  if (TREE_CODE (arg) == SSA_NAME
                      && var_to_partition (map, arg) != NO_PARTITION)
                    {
                      fprintf (stderr, "Argument of PHI is in a partition :(");
                      print_generic_expr (stderr, arg, TDF_SLIM);
                      fprintf (stderr, "), but the result is not :");
                      print_gimple_stmt (stderr, phi, 0, TDF_SLIM);
                      internal_error ("SSA corruption");
                    }
                }
            }
        }
    }
}
/* Create a default def for VAR.  */

static void
create_default_def (tree var, void *arg ATTRIBUTE_UNUSED)
{
  if (!is_gimple_reg (var))
    return;

  tree ssa = get_or_create_ssa_default_def (cfun, var);
  gcc_assert (ssa);
}
/* Call CALLBACK for all PARM_DECLs and RESULT_DECLs for which
   assign_parms may ask for a default partition.  */

static void
for_all_parms (void (*callback)(tree var, void *arg), void *arg)
{
  for (tree var = DECL_ARGUMENTS (current_function_decl); var;
       var = DECL_CHAIN (var))
    callback (var, arg);
  if (!VOID_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    callback (DECL_RESULT (current_function_decl), arg);
  if (cfun->static_chain_decl)
    callback (cfun->static_chain_decl, arg);
}
/* We need to pass two arguments to set_parm_default_def_partition,
   but for_all_parms only supports one.  Use a pair.  */

typedef std::pair<var_map, bitmap> parm_default_def_partition_arg;
/* Set in ARG's PARTS bitmap the bit corresponding to the partition in
   ARG's MAP containing VAR's default def.  */

static void
set_parm_default_def_partition (tree var, void *arg_)
{
  parm_default_def_partition_arg *arg = (parm_default_def_partition_arg *)arg_;
  var_map map = arg->first;
  bitmap parts = arg->second;

  if (!is_gimple_reg (var))
    return;

  tree ssa = ssa_default_def (cfun, var);
  gcc_assert (ssa);

  int version = var_to_partition (map, ssa);
  gcc_assert (version != NO_PARTITION);

  bool changed = bitmap_set_bit (parts, version);
  gcc_assert (changed);
}
/* Allocate and return a bitmap that has a bit set for each partition
   that contains a default def for a parameter.  */

bitmap
get_parm_default_def_partitions (var_map map)
{
  bitmap parm_default_def_parts = BITMAP_ALLOC (NULL);

  parm_default_def_partition_arg
    arg = std::make_pair (map, parm_default_def_parts);

  for_all_parms (set_parm_default_def_partition, &arg);

  return parm_default_def_parts;
}
/* Allocate and return a bitmap that has a bit set for each partition
   that contains an undefined value.  */

bitmap
get_undefined_value_partitions (var_map map)
{
  bitmap undefined_value_parts = BITMAP_ALLOC (NULL);

  for (unsigned int i = 1; i < num_ssa_names; i++)
    {
      tree var = ssa_name (i);
      if (var
          && !virtual_operand_p (var)
          && !has_zero_uses (var)
          && ssa_undefined_value_p (var))
        {
          const int p = var_to_partition (map, var);
          if (p != NO_PARTITION)
            bitmap_set_bit (undefined_value_parts, p);
        }
    }

  return undefined_value_parts;
}
/* Given the out-of-ssa info object SA (with prepared partitions)
   eliminate all phi nodes in all basic blocks.  Afterwards no
   basic block will have phi nodes anymore and there are possibly
   some RTL instructions inserted on edges.  */

void
expand_phi_nodes (struct ssaexpand *sa)
{
  basic_block bb;
  elim_graph g (sa->map);

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb,
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    if (!gimple_seq_empty_p (phi_nodes (bb)))
      {
        edge e;
        edge_iterator ei;
        FOR_EACH_EDGE (e, ei, bb->preds)
          eliminate_phi (e, &g);
        set_phi_nodes (bb, NULL);
        /* We can't redirect EH edges in RTL land, so we need to do this
           here.  Redirection happens only when splitting is necessary,
           which it is only for critical edges, normally.  For EH edges
           it might also be necessary when the successor has more than
           one predecessor.  In that case the edge is either required to
           be fallthru (which EH edges aren't), or the predecessor needs
           to end with a jump (which again, isn't the case with EH edges).
           Hence, split all EH edges on which we inserted instructions
           and whose successor has multiple predecessors.  */
        for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
          {
            if (e->insns.r && (e->flags & EDGE_EH)
                && !single_pred_p (e->dest))
              {
                rtx_insn *insns = e->insns.r;
                basic_block bb;
                e->insns.r = NULL;
                bb = split_edge (e);
                single_pred_edge (bb)->insns.r = insns;
              }
            else
              ei_next (&ei);
          }
      }
}
/* Remove the ssa-names in the current function and translate them into normal
   compiler variables.  PERFORM_TER is true if Temporary Expression Replacement
   should also be used.  */

static void
remove_ssa_form (bool perform_ter, struct ssaexpand *sa)
{
  bitmap values = NULL;
  var_map map;

  for_all_parms (create_default_def, NULL);
  map = init_var_map (num_ssa_names);
  coalesce_ssa_name (map);

  /* Return to viewing the variable list as just all reference variables after
     coalescing has been performed.  */
  partition_view_normal (map);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "After Coalescing:\n");
      dump_var_map (dump_file, map);
    }

  if (perform_ter)
    {
      values = find_replaceable_exprs (map);
      if (values && dump_file && (dump_flags & TDF_DETAILS))
        dump_replaceable_exprs (dump_file, values);
    }

  rewrite_trees (map);

  sa->map = map;
  sa->values = values;
  sa->partitions_for_parm_default_defs = get_parm_default_def_partitions (map);
  sa->partitions_for_undefined_values = get_undefined_value_partitions (map);
}
/* If not already done so for basic block BB, assign increasing uids
   to each of its instructions.  */

static void
maybe_renumber_stmts_bb (basic_block bb)
{
  unsigned i = 0;
  gimple_stmt_iterator gsi;

  if (!bb->aux)
    return;
  bb->aux = NULL;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      gimple_set_uid (stmt, i);
      i++;
    }
}
/* Return true if we can determine that the SSA_NAMEs RESULT (a result
   of a PHI node) and ARG (one of its arguments) conflict.  Return false
   otherwise, also when we simply aren't sure.  */

static bool
trivially_conflicts_p (basic_block bb, tree result, tree arg)
{
  use_operand_p use;
  imm_use_iterator imm_iter;
  gimple *defa = SSA_NAME_DEF_STMT (arg);

  /* If ARG isn't defined in the same block it's too complicated for
     our little mind.  */
  if (gimple_bb (defa) != bb)
    return false;

  FOR_EACH_IMM_USE_FAST (use, imm_iter, result)
    {
      gimple *use_stmt = USE_STMT (use);
      if (is_gimple_debug (use_stmt))
        continue;
      /* Now, if there's a use of RESULT that lies outside this basic block,
         then there surely is a conflict with ARG.  */
      if (gimple_bb (use_stmt) != bb)
        return true;
      if (gimple_code (use_stmt) == GIMPLE_PHI)
        continue;
      /* The use now is in a real stmt of BB, so if ARG was defined
         in a PHI node (like RESULT) both conflict.  */
      if (gimple_code (defa) == GIMPLE_PHI)
        return true;
      maybe_renumber_stmts_bb (bb);
      /* If the use of RESULT occurs after the definition of ARG,
         the two conflict too.  */
      if (gimple_uid (defa) < gimple_uid (use_stmt))
        return true;
    }

  return false;
}
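
/* For instance, with x_1 = PHI <..., x_2> where x_2 is assigned by a
   statement in BB and x_1 is still used after that assignment (or in some
   other block), x_1 and x_2 are live simultaneously and therefore conflict,
   which is what insert_backedge_copies below needs to know before it
   decides where a copy can be placed.  */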
/* Search every PHI node for arguments associated with backedges which
   we can trivially determine will need a copy (the argument is either
   not an SSA_NAME or the argument has a different underlying variable
   than the PHI result).

   Insert a copy from the PHI argument to a new destination at the
   end of the block with the backedge to the top of the loop.  Update
   the PHI argument to reference this new destination.  */

static void
insert_backedge_copies (void)
{
  basic_block bb;
  gphi_iterator gsi;

  mark_dfs_back_edges ();

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Mark block as possibly needing calculation of UIDs.  */
      bb->aux = &bb->aux;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          tree result = gimple_phi_result (phi);
          size_t i;

          if (virtual_operand_p (result))
            continue;

          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree arg = gimple_phi_arg_def (phi, i);
              edge e = gimple_phi_arg_edge (phi, i);
              /* We are only interested in copies emitted on critical
                 backedges.  */
              if (!(e->flags & EDGE_DFS_BACK)
                  || !EDGE_CRITICAL_P (e))
                continue;

              /* If the argument is not an SSA_NAME, then we will need a
                 constant initialization.  If the argument is an SSA_NAME then
                 a copy statement may be needed.  First handle the case
                 where we cannot insert before the argument definition.  */
              if (TREE_CODE (arg) != SSA_NAME
                  || (gimple_code (SSA_NAME_DEF_STMT (arg)) == GIMPLE_PHI
                      && trivially_conflicts_p (bb, result, arg)))
                {
                  tree name;
                  gassign *stmt;
                  gimple *last = NULL;
                  gimple_stmt_iterator gsi2;

                  gsi2 = gsi_last_bb (gimple_phi_arg_edge (phi, i)->src);
                  if (!gsi_end_p (gsi2))
                    last = gsi_stmt (gsi2);

                  /* In theory the only way we ought to get back to the
                     start of a loop should be with a COND_EXPR or GOTO_EXPR.
                     However, better safe than sorry.
                     If the block ends with a control statement or
                     something that might throw, then we have to
                     insert this assignment before the last
                     statement.  Else insert it after the last statement.  */
                  if (last && stmt_ends_bb_p (last))
                    {
                      /* If the last statement in the block is the definition
                         site of the PHI argument, then we can't insert
                         anything after it.  */
                      if (TREE_CODE (arg) == SSA_NAME
                          && SSA_NAME_DEF_STMT (arg) == last)
                        continue;
                    }

                  /* Create a new instance of the underlying variable of the
                     PHI result.  */
                  name = copy_ssa_name (result);
                  stmt = gimple_build_assign (name,
                                              gimple_phi_arg_def (phi, i));

                  /* copy location if present.  */
                  if (gimple_phi_arg_has_location (phi, i))
                    gimple_set_location (stmt,
                                         gimple_phi_arg_location (phi, i));

                  /* Insert the new statement into the block and update
                     the PHI node.  */
                  if (last && stmt_ends_bb_p (last))
                    gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
                  else
                    gsi_insert_after (&gsi2, stmt, GSI_NEW_STMT);
                  SET_PHI_ARG_DEF (phi, i, name);
                }
              /* Insert a copy before the definition of the backedge value
                 and adjust all conflicting uses.  */
              else if (trivially_conflicts_p (bb, result, arg))
                {
                  gimple *def = SSA_NAME_DEF_STMT (arg);
                  if (gimple_nop_p (def)
                      || gimple_code (def) == GIMPLE_PHI)
                    continue;
                  tree name = copy_ssa_name (result);
                  gimple *stmt = gimple_build_assign (name, result);
                  imm_use_iterator imm_iter;
                  gimple *use_stmt;
                  /* The following matches trivially_conflicts_p.  */
                  FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, result)
                    {
                      if (gimple_bb (use_stmt) != bb
                          || (gimple_code (use_stmt) != GIMPLE_PHI
                              && (maybe_renumber_stmts_bb (bb), true)
                              && gimple_uid (use_stmt) > gimple_uid (def)))
                        {
                          use_operand_p use;
                          FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
                            SET_USE (use, name);
                        }
                    }
                  gimple_stmt_iterator gsi = gsi_for_stmt (def);
                  gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
                }
            }
        }

      /* Unmark this block again.  */
      bb->aux = NULL;
    }
}
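
/* For example, given a loop header PHI x_1 = PHI <0(entry), 7(latch)>
   whose latch edge is a critical back edge, the code above emits x_2 = 7
   at the end of the latch block and rewrites the PHI into
   x_1 = PHI <0(entry), x_2(latch)>, so that eliminate_phi never has to
   split the back edge just to materialize the constant.  */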
/* Free all memory associated with going out of SSA form.  SA is
   the outof-SSA info object.  */

void
finish_out_of_ssa (struct ssaexpand *sa)
{
  free (sa->partition_to_pseudo);
  if (sa->values)
    BITMAP_FREE (sa->values);
  delete_var_map (sa->map);
  BITMAP_FREE (sa->partitions_for_parm_default_defs);
  BITMAP_FREE (sa->partitions_for_undefined_values);
  memset (sa, 0, sizeof *sa);
}
/* Take the current function out of SSA form, translating PHIs as described in
   R. Morgan, ``Building an Optimizing Compiler'',
   Butterworth-Heinemann, Boston, MA, 1998.  pp 176-186.  */

void
rewrite_out_of_ssa (struct ssaexpand *sa)
{
  /* If elimination of a PHI requires inserting a copy on a backedge,
     then we will have to split the backedge which has numerous
     undesirable performance effects.

     A significant number of such cases can be handled here by inserting
     copies into the loop itself.  */
  insert_backedge_copies ();

  /* Eliminate PHIs which are of no use, such as virtual or dead phis.  */
  eliminate_useless_phis ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);

  remove_ssa_form (flag_tree_ter, sa);

  if (dump_file && (dump_flags & TDF_DETAILS))
    gimple_dump_cfg (dump_file, dump_flags & ~TDF_DETAILS);
}