1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "basic-block.h"
30 #include "langhooks.h"
32 #include "tree-pass.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
40 #include "tree-inline.h"
41 #include "value-prof.h"
43 #include "tree-outof-ssa.h"
47 #include "regs.h" /* For reg_renumber. */
48 #include "insn-attr.h" /* For INSN_SCHEDULING. */
51 /* This variable holds information helping the rewriting of SSA trees
55 /* This variable holds the currently expanded gimple statement for purposes
56 of comminucating the profile info to the builtin expanders. */
57 gimple currently_expanding_gimple_stmt
;
59 static rtx
expand_debug_expr (tree
);
61 /* Return an expression tree corresponding to the RHS of GIMPLE
65 gimple_assign_rhs_to_tree (gimple stmt
)
68 enum gimple_rhs_class grhs_class
;
70 grhs_class
= get_gimple_rhs_class (gimple_expr_code (stmt
));
72 if (grhs_class
== GIMPLE_TERNARY_RHS
)
73 t
= build3 (gimple_assign_rhs_code (stmt
),
74 TREE_TYPE (gimple_assign_lhs (stmt
)),
75 gimple_assign_rhs1 (stmt
),
76 gimple_assign_rhs2 (stmt
),
77 gimple_assign_rhs3 (stmt
));
78 else if (grhs_class
== GIMPLE_BINARY_RHS
)
79 t
= build2 (gimple_assign_rhs_code (stmt
),
80 TREE_TYPE (gimple_assign_lhs (stmt
)),
81 gimple_assign_rhs1 (stmt
),
82 gimple_assign_rhs2 (stmt
));
83 else if (grhs_class
== GIMPLE_UNARY_RHS
)
84 t
= build1 (gimple_assign_rhs_code (stmt
),
85 TREE_TYPE (gimple_assign_lhs (stmt
)),
86 gimple_assign_rhs1 (stmt
));
87 else if (grhs_class
== GIMPLE_SINGLE_RHS
)
89 t
= gimple_assign_rhs1 (stmt
);
90 /* Avoid modifying this tree in place below. */
91 if ((gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (t
)
92 && gimple_location (stmt
) != EXPR_LOCATION (t
))
93 || (gimple_block (stmt
)
94 && currently_expanding_to_rtl
101 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (t
))
102 SET_EXPR_LOCATION (t
, gimple_location (stmt
));
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* Strip an SSA_NAME down to the VAR_DECL/PARM_DECL it was created for,
   or yield X itself when it is already a decl.  */
#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
114 /* Associate declaration T with storage space X. If T is no
115 SSA name this is exactly SET_DECL_RTL, otherwise make the
116 partition of T associated with X. */
118 set_rtl (tree t
, rtx x
)
120 if (TREE_CODE (t
) == SSA_NAME
)
122 SA
.partition_to_pseudo
[var_to_partition (SA
.map
, t
)] = x
;
124 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t
), x
);
125 /* For the benefit of debug information at -O0 (where vartracking
126 doesn't run) record the place also in the base DECL if it's
127 a normal variable (not a parameter). */
128 if (x
&& x
!= pc_rtx
&& TREE_CODE (SSA_NAME_VAR (t
)) == VAR_DECL
)
130 tree var
= SSA_NAME_VAR (t
);
131 /* If we don't yet have something recorded, just record it now. */
132 if (!DECL_RTL_SET_P (var
))
133 SET_DECL_RTL (var
, x
);
134 /* If we have it set already to "multiple places" don't
136 else if (DECL_RTL (var
) == pc_rtx
)
138 /* If we have something recorded and it's not the same place
139 as we want to record now, we have multiple partitions for the
140 same base variable, with different places. We can't just
141 randomly chose one, hence we have to say that we don't know.
142 This only happens with optimization, and there var-tracking
143 will figure out the right thing. */
144 else if (DECL_RTL (var
) != x
)
145 SET_DECL_RTL (var
, pc_rtx
);
152 /* This structure holds data relevant to one variable that will be
153 placed in a stack slot. */
159 /* Initially, the size of the variable. Later, the size of the partition,
160 if this variable becomes it's partition's representative. */
163 /* The *byte* alignment required for this variable. Or as, with the
164 size, the alignment for this partition. */
167 /* The partition representative. */
168 size_t representative
;
170 /* The next stack variable in the partition, or EOC. */
173 /* The numbers of conflicting stack variables. */
177 #define EOC ((size_t)-1)
179 /* We have an array of such objects while deciding allocation. */
180 static struct stack_var
*stack_vars
;
181 static size_t stack_vars_alloc
;
182 static size_t stack_vars_num
;
183 static struct pointer_map_t
*decl_to_stack_part
;
185 /* Conflict bitmaps go on this obstack. This allows us to destroy
186 all of them in one big sweep. */
187 static bitmap_obstack stack_var_bitmap_obstack
;
189 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
190 is non-decreasing. */
191 static size_t *stack_vars_sorted
;
193 /* The phase of the stack frame. This is the known misalignment of
194 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
195 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
196 static int frame_phase
;
198 /* Used during expand_used_vars to remember if we saw any decls for
199 which we'd like to enable stack smashing protection. */
200 static bool has_protected_decls
;
202 /* Used during expand_used_vars. Remember if we say a character buffer
203 smaller than our cutoff threshold. Used for -Wstack-protector. */
204 static bool has_short_buffer
;
206 /* Compute the byte alignment to use for DECL. Ignore alignment
207 we can't do with expected alignment of the stack boundary. */
210 align_local_variable (tree decl
)
212 unsigned int align
= LOCAL_DECL_ALIGNMENT (decl
);
213 DECL_ALIGN (decl
) = align
;
214 return align
/ BITS_PER_UNIT
;
217 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
218 Return the frame offset. */
221 alloc_stack_frame_space (HOST_WIDE_INT size
, unsigned HOST_WIDE_INT align
)
223 HOST_WIDE_INT offset
, new_frame_offset
;
225 new_frame_offset
= frame_offset
;
226 if (FRAME_GROWS_DOWNWARD
)
228 new_frame_offset
-= size
+ frame_phase
;
229 new_frame_offset
&= -align
;
230 new_frame_offset
+= frame_phase
;
231 offset
= new_frame_offset
;
235 new_frame_offset
-= frame_phase
;
236 new_frame_offset
+= align
- 1;
237 new_frame_offset
&= -align
;
238 new_frame_offset
+= frame_phase
;
239 offset
= new_frame_offset
;
240 new_frame_offset
+= size
;
242 frame_offset
= new_frame_offset
;
244 if (frame_offset_overflow (frame_offset
, cfun
->decl
))
245 frame_offset
= offset
= 0;
250 /* Accumulate DECL into STACK_VARS. */
253 add_stack_var (tree decl
)
257 if (stack_vars_num
>= stack_vars_alloc
)
259 if (stack_vars_alloc
)
260 stack_vars_alloc
= stack_vars_alloc
* 3 / 2;
262 stack_vars_alloc
= 32;
264 = XRESIZEVEC (struct stack_var
, stack_vars
, stack_vars_alloc
);
266 if (!decl_to_stack_part
)
267 decl_to_stack_part
= pointer_map_create ();
269 v
= &stack_vars
[stack_vars_num
];
270 * (size_t *)pointer_map_insert (decl_to_stack_part
, decl
) = stack_vars_num
;
273 v
->size
= tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl
)), 1);
274 /* Ensure that all variables have size, so that &a != &b for any two
275 variables that are simultaneously live. */
278 v
->alignb
= align_local_variable (SSAVAR (decl
));
279 /* An alignment of zero can mightily confuse us later. */
280 gcc_assert (v
->alignb
!= 0);
282 /* All variables are initially in their own partition. */
283 v
->representative
= stack_vars_num
;
286 /* All variables initially conflict with no other. */
289 /* Ensure that this decl doesn't get put onto the list twice. */
290 set_rtl (decl
, pc_rtx
);
295 /* Make the decls associated with luid's X and Y conflict. */
298 add_stack_var_conflict (size_t x
, size_t y
)
300 struct stack_var
*a
= &stack_vars
[x
];
301 struct stack_var
*b
= &stack_vars
[y
];
303 a
->conflicts
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
305 b
->conflicts
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
306 bitmap_set_bit (a
->conflicts
, y
);
307 bitmap_set_bit (b
->conflicts
, x
);
310 /* Check whether the decls associated with luid's X and Y conflict. */
313 stack_var_conflict_p (size_t x
, size_t y
)
315 struct stack_var
*a
= &stack_vars
[x
];
316 struct stack_var
*b
= &stack_vars
[y
];
319 /* Partitions containing an SSA name result from gimple registers
320 with things like unsupported modes. They are top-level and
321 hence conflict with everything else. */
322 if (TREE_CODE (a
->decl
) == SSA_NAME
|| TREE_CODE (b
->decl
) == SSA_NAME
)
325 if (!a
->conflicts
|| !b
->conflicts
)
327 return bitmap_bit_p (a
->conflicts
, y
);
330 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
331 enter its partition number into bitmap DATA. */
334 visit_op (gimple stmt ATTRIBUTE_UNUSED
, tree op
, void *data
)
336 bitmap active
= (bitmap
)data
;
337 op
= get_base_address (op
);
340 && DECL_RTL_IF_SET (op
) == pc_rtx
)
342 size_t *v
= (size_t *) pointer_map_contains (decl_to_stack_part
, op
);
344 bitmap_set_bit (active
, *v
);
349 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
350 record conflicts between it and all currently active other partitions
354 visit_conflict (gimple stmt ATTRIBUTE_UNUSED
, tree op
, void *data
)
356 bitmap active
= (bitmap
)data
;
357 op
= get_base_address (op
);
360 && DECL_RTL_IF_SET (op
) == pc_rtx
)
363 (size_t *) pointer_map_contains (decl_to_stack_part
, op
);
364 if (v
&& bitmap_set_bit (active
, *v
))
369 gcc_assert (num
< stack_vars_num
);
370 EXECUTE_IF_SET_IN_BITMAP (active
, 0, i
, bi
)
371 add_stack_var_conflict (num
, i
);
377 /* Helper routine for add_scope_conflicts, calculating the active partitions
378 at the end of BB, leaving the result in WORK. We're called to generate
379 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
383 add_scope_conflicts_1 (basic_block bb
, bitmap work
, bool for_conflict
)
387 gimple_stmt_iterator gsi
;
388 bool (*visit
)(gimple
, tree
, void *);
391 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
392 bitmap_ior_into (work
, (bitmap
)e
->src
->aux
);
396 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
398 gimple stmt
= gsi_stmt (gsi
);
399 walk_stmt_load_store_addr_ops (stmt
, work
, NULL
, NULL
, visit
);
401 for (gsi
= gsi_after_labels (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
403 gimple stmt
= gsi_stmt (gsi
);
405 if (gimple_clobber_p (stmt
))
407 tree lhs
= gimple_assign_lhs (stmt
);
409 /* Nested function lowering might introduce LHSs
410 that are COMPONENT_REFs. */
411 if (TREE_CODE (lhs
) != VAR_DECL
)
413 if (DECL_RTL_IF_SET (lhs
) == pc_rtx
415 pointer_map_contains (decl_to_stack_part
, lhs
)))
416 bitmap_clear_bit (work
, *v
);
418 else if (!is_gimple_debug (stmt
))
421 && visit
== visit_op
)
423 /* If this is the first real instruction in this BB we need
424 to add conflicts for everything live at this point now.
425 Unlike classical liveness for named objects we can't
426 rely on seeing a def/use of the names we're interested in.
427 There might merely be indirect loads/stores. We'd not add any
428 conflicts for such partitions. */
431 EXECUTE_IF_SET_IN_BITMAP (work
, 0, i
, bi
)
433 struct stack_var
*a
= &stack_vars
[i
];
435 a
->conflicts
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
436 bitmap_ior_into (a
->conflicts
, work
);
438 visit
= visit_conflict
;
440 walk_stmt_load_store_addr_ops (stmt
, work
, visit
, visit
, visit
);
445 /* Generate stack partition conflicts between all partitions that are
446 simultaneously live. */
449 add_scope_conflicts (void)
453 bitmap work
= BITMAP_ALLOC (NULL
);
457 /* We approximate the live range of a stack variable by taking the first
458 mention of its name as starting point(s), and by the end-of-scope
459 death clobber added by gimplify as ending point(s) of the range.
460 This overapproximates in the case we for instance moved an address-taken
461 operation upward, without also moving a dereference to it upwards.
462 But it's conservatively correct as a variable never can hold values
463 before its name is mentioned at least once.
465 We then do a mostly classical bitmap liveness algorithm. */
468 bb
->aux
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
470 rpo
= XNEWVEC (int, last_basic_block
);
471 n_bbs
= pre_and_rev_post_order_compute (NULL
, rpo
, false);
478 for (i
= 0; i
< n_bbs
; i
++)
481 bb
= BASIC_BLOCK (rpo
[i
]);
482 active
= (bitmap
)bb
->aux
;
483 add_scope_conflicts_1 (bb
, work
, false);
484 if (bitmap_ior_into (active
, work
))
490 add_scope_conflicts_1 (bb
, work
, true);
495 BITMAP_FREE (bb
->aux
);
498 /* A subroutine of partition_stack_vars. A comparison function for qsort,
499 sorting an array of indices by the properties of the object. */
502 stack_var_cmp (const void *a
, const void *b
)
504 size_t ia
= *(const size_t *)a
;
505 size_t ib
= *(const size_t *)b
;
506 unsigned int aligna
= stack_vars
[ia
].alignb
;
507 unsigned int alignb
= stack_vars
[ib
].alignb
;
508 HOST_WIDE_INT sizea
= stack_vars
[ia
].size
;
509 HOST_WIDE_INT sizeb
= stack_vars
[ib
].size
;
510 tree decla
= stack_vars
[ia
].decl
;
511 tree declb
= stack_vars
[ib
].decl
;
513 unsigned int uida
, uidb
;
515 /* Primary compare on "large" alignment. Large comes first. */
516 largea
= (aligna
* BITS_PER_UNIT
> MAX_SUPPORTED_STACK_ALIGNMENT
);
517 largeb
= (alignb
* BITS_PER_UNIT
> MAX_SUPPORTED_STACK_ALIGNMENT
);
518 if (largea
!= largeb
)
519 return (int)largeb
- (int)largea
;
521 /* Secondary compare on size, decreasing */
527 /* Tertiary compare on true alignment, decreasing. */
533 /* Final compare on ID for sort stability, increasing.
534 Two SSA names are compared by their version, SSA names come before
535 non-SSA names, and two normal decls are compared by their DECL_UID. */
536 if (TREE_CODE (decla
) == SSA_NAME
)
538 if (TREE_CODE (declb
) == SSA_NAME
)
539 uida
= SSA_NAME_VERSION (decla
), uidb
= SSA_NAME_VERSION (declb
);
543 else if (TREE_CODE (declb
) == SSA_NAME
)
546 uida
= DECL_UID (decla
), uidb
= DECL_UID (declb
);
555 /* If the points-to solution *PI points to variables that are in a partition
556 together with other variables add all partition members to the pointed-to
560 add_partitioned_vars_to_ptset (struct pt_solution
*pt
,
561 struct pointer_map_t
*decls_to_partitions
,
562 struct pointer_set_t
*visited
, bitmap temp
)
570 /* The pointed-to vars bitmap is shared, it is enough to
572 || pointer_set_insert (visited
, pt
->vars
))
577 /* By using a temporary bitmap to store all members of the partitions
578 we have to add we make sure to visit each of the partitions only
580 EXECUTE_IF_SET_IN_BITMAP (pt
->vars
, 0, i
, bi
)
582 || !bitmap_bit_p (temp
, i
))
583 && (part
= (bitmap
*) pointer_map_contains (decls_to_partitions
,
584 (void *)(size_t) i
)))
585 bitmap_ior_into (temp
, *part
);
586 if (!bitmap_empty_p (temp
))
587 bitmap_ior_into (pt
->vars
, temp
);
590 /* Update points-to sets based on partition info, so we can use them on RTL.
591 The bitmaps representing stack partitions will be saved until expand,
592 where partitioned decls used as bases in memory expressions will be
596 update_alias_info_with_stack_vars (void)
598 struct pointer_map_t
*decls_to_partitions
= NULL
;
600 tree var
= NULL_TREE
;
602 for (i
= 0; i
< stack_vars_num
; i
++)
606 struct ptr_info_def
*pi
;
608 /* Not interested in partitions with single variable. */
609 if (stack_vars
[i
].representative
!= i
610 || stack_vars
[i
].next
== EOC
)
613 if (!decls_to_partitions
)
615 decls_to_partitions
= pointer_map_create ();
616 cfun
->gimple_df
->decls_to_pointers
= pointer_map_create ();
619 /* Create an SSA_NAME that points to the partition for use
620 as base during alias-oracle queries on RTL for bases that
621 have been partitioned. */
622 if (var
== NULL_TREE
)
623 var
= create_tmp_var (ptr_type_node
, NULL
);
624 name
= make_ssa_name (var
, NULL
);
626 /* Create bitmaps representing partitions. They will be used for
627 points-to sets later, so use GGC alloc. */
628 part
= BITMAP_GGC_ALLOC ();
629 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
631 tree decl
= stack_vars
[j
].decl
;
632 unsigned int uid
= DECL_PT_UID (decl
);
633 bitmap_set_bit (part
, uid
);
634 *((bitmap
*) pointer_map_insert (decls_to_partitions
,
635 (void *)(size_t) uid
)) = part
;
636 *((tree
*) pointer_map_insert (cfun
->gimple_df
->decls_to_pointers
,
638 if (TREE_ADDRESSABLE (decl
))
639 TREE_ADDRESSABLE (name
) = 1;
642 /* Make the SSA name point to all partition members. */
643 pi
= get_ptr_info (name
);
644 pt_solution_set (&pi
->pt
, part
, false);
647 /* Make all points-to sets that contain one member of a partition
648 contain all members of the partition. */
649 if (decls_to_partitions
)
652 struct pointer_set_t
*visited
= pointer_set_create ();
653 bitmap temp
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
655 for (i
= 1; i
< num_ssa_names
; i
++)
657 tree name
= ssa_name (i
);
658 struct ptr_info_def
*pi
;
661 && POINTER_TYPE_P (TREE_TYPE (name
))
662 && ((pi
= SSA_NAME_PTR_INFO (name
)) != NULL
))
663 add_partitioned_vars_to_ptset (&pi
->pt
, decls_to_partitions
,
667 add_partitioned_vars_to_ptset (&cfun
->gimple_df
->escaped
,
668 decls_to_partitions
, visited
, temp
);
670 pointer_set_destroy (visited
);
671 pointer_map_destroy (decls_to_partitions
);
676 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
677 partitioning algorithm. Partitions A and B are known to be non-conflicting.
678 Merge them into a single partition A. */
681 union_stack_vars (size_t a
, size_t b
)
683 struct stack_var
*vb
= &stack_vars
[b
];
687 gcc_assert (stack_vars
[b
].next
== EOC
);
688 /* Add B to A's partition. */
689 stack_vars
[b
].next
= stack_vars
[a
].next
;
690 stack_vars
[b
].representative
= a
;
691 stack_vars
[a
].next
= b
;
693 /* Update the required alignment of partition A to account for B. */
694 if (stack_vars
[a
].alignb
< stack_vars
[b
].alignb
)
695 stack_vars
[a
].alignb
= stack_vars
[b
].alignb
;
697 /* Update the interference graph and merge the conflicts. */
700 EXECUTE_IF_SET_IN_BITMAP (vb
->conflicts
, 0, u
, bi
)
701 add_stack_var_conflict (a
, stack_vars
[u
].representative
);
702 BITMAP_FREE (vb
->conflicts
);
706 /* A subroutine of expand_used_vars. Binpack the variables into
707 partitions constrained by the interference graph. The overall
708 algorithm used is as follows:
710 Sort the objects by size in descending order.
715 Look for the largest non-conflicting object B with size <= S.
722 partition_stack_vars (void)
724 size_t si
, sj
, n
= stack_vars_num
;
726 stack_vars_sorted
= XNEWVEC (size_t, stack_vars_num
);
727 for (si
= 0; si
< n
; ++si
)
728 stack_vars_sorted
[si
] = si
;
733 qsort (stack_vars_sorted
, n
, sizeof (size_t), stack_var_cmp
);
735 for (si
= 0; si
< n
; ++si
)
737 size_t i
= stack_vars_sorted
[si
];
738 unsigned int ialign
= stack_vars
[i
].alignb
;
739 HOST_WIDE_INT isize
= stack_vars
[i
].size
;
741 /* Ignore objects that aren't partition representatives. If we
742 see a var that is not a partition representative, it must
743 have been merged earlier. */
744 if (stack_vars
[i
].representative
!= i
)
747 for (sj
= si
+ 1; sj
< n
; ++sj
)
749 size_t j
= stack_vars_sorted
[sj
];
750 unsigned int jalign
= stack_vars
[j
].alignb
;
751 HOST_WIDE_INT jsize
= stack_vars
[j
].size
;
753 /* Ignore objects that aren't partition representatives. */
754 if (stack_vars
[j
].representative
!= j
)
757 /* Do not mix objects of "small" (supported) alignment
758 and "large" (unsupported) alignment. */
759 if ((ialign
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
760 != (jalign
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
))
763 /* For Address Sanitizer do not mix objects with different
764 sizes, as the shorter vars wouldn't be adequately protected.
765 Don't do that for "large" (unsupported) alignment objects,
766 those aren't protected anyway. */
767 if ((flag_sanitize
& SANITIZE_ADDRESS
) && isize
!= jsize
768 && ialign
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
771 /* Ignore conflicting objects. */
772 if (stack_var_conflict_p (i
, j
))
775 /* UNION the objects, placing J at OFFSET. */
776 union_stack_vars (i
, j
);
780 update_alias_info_with_stack_vars ();
783 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
786 dump_stack_var_partition (void)
788 size_t si
, i
, j
, n
= stack_vars_num
;
790 for (si
= 0; si
< n
; ++si
)
792 i
= stack_vars_sorted
[si
];
794 /* Skip variables that aren't partition representatives, for now. */
795 if (stack_vars
[i
].representative
!= i
)
798 fprintf (dump_file
, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
799 " align %u\n", (unsigned long) i
, stack_vars
[i
].size
,
800 stack_vars
[i
].alignb
);
802 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
804 fputc ('\t', dump_file
);
805 print_generic_expr (dump_file
, stack_vars
[j
].decl
, dump_flags
);
807 fputc ('\n', dump_file
);
811 /* Assign rtl to DECL at BASE + OFFSET. */
814 expand_one_stack_var_at (tree decl
, rtx base
, unsigned base_align
,
815 HOST_WIDE_INT offset
)
820 /* If this fails, we've overflowed the stack frame. Error nicely? */
821 gcc_assert (offset
== trunc_int_for_mode (offset
, Pmode
));
823 x
= plus_constant (Pmode
, base
, offset
);
824 x
= gen_rtx_MEM (DECL_MODE (SSAVAR (decl
)), x
);
826 if (TREE_CODE (decl
) != SSA_NAME
)
828 /* Set alignment we actually gave this decl if it isn't an SSA name.
829 If it is we generate stack slots only accidentally so it isn't as
830 important, we'll simply use the alignment that is already set. */
831 if (base
== virtual_stack_vars_rtx
)
832 offset
-= frame_phase
;
833 align
= offset
& -offset
;
834 align
*= BITS_PER_UNIT
;
835 if (align
== 0 || align
> base_align
)
838 /* One would think that we could assert that we're not decreasing
839 alignment here, but (at least) the i386 port does exactly this
840 via the MINIMUM_ALIGNMENT hook. */
842 DECL_ALIGN (decl
) = align
;
843 DECL_USER_ALIGN (decl
) = 0;
846 set_mem_attributes (x
, SSAVAR (decl
), true);
850 struct stack_vars_data
852 /* Vector of offset pairs, always end of some padding followed
853 by start of the padding that needs Address Sanitizer protection.
854 The vector is in reversed, highest offset pairs come first. */
855 vec
<HOST_WIDE_INT
> asan_vec
;
857 /* Vector of partition representative decls in between the paddings. */
858 vec
<tree
> asan_decl_vec
;
861 /* A subroutine of expand_used_vars. Give each partition representative
862 a unique location within the stack frame. Update each partition member
863 with that location. */
866 expand_stack_vars (bool (*pred
) (size_t), struct stack_vars_data
*data
)
868 size_t si
, i
, j
, n
= stack_vars_num
;
869 HOST_WIDE_INT large_size
= 0, large_alloc
= 0;
870 rtx large_base
= NULL
;
871 unsigned large_align
= 0;
874 /* Determine if there are any variables requiring "large" alignment.
875 Since these are dynamically allocated, we only process these if
876 no predicate involved. */
877 large_align
= stack_vars
[stack_vars_sorted
[0]].alignb
* BITS_PER_UNIT
;
878 if (pred
== NULL
&& large_align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
880 /* Find the total size of these variables. */
881 for (si
= 0; si
< n
; ++si
)
885 i
= stack_vars_sorted
[si
];
886 alignb
= stack_vars
[i
].alignb
;
888 /* Stop when we get to the first decl with "small" alignment. */
889 if (alignb
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
892 /* Skip variables that aren't partition representatives. */
893 if (stack_vars
[i
].representative
!= i
)
896 /* Skip variables that have already had rtl assigned. See also
897 add_stack_var where we perpetrate this pc_rtx hack. */
898 decl
= stack_vars
[i
].decl
;
899 if ((TREE_CODE (decl
) == SSA_NAME
900 ? SA
.partition_to_pseudo
[var_to_partition (SA
.map
, decl
)]
901 : DECL_RTL (decl
)) != pc_rtx
)
904 large_size
+= alignb
- 1;
905 large_size
&= -(HOST_WIDE_INT
)alignb
;
906 large_size
+= stack_vars
[i
].size
;
909 /* If there were any, allocate space. */
911 large_base
= allocate_dynamic_stack_space (GEN_INT (large_size
), 0,
915 for (si
= 0; si
< n
; ++si
)
918 unsigned base_align
, alignb
;
919 HOST_WIDE_INT offset
;
921 i
= stack_vars_sorted
[si
];
923 /* Skip variables that aren't partition representatives, for now. */
924 if (stack_vars
[i
].representative
!= i
)
927 /* Skip variables that have already had rtl assigned. See also
928 add_stack_var where we perpetrate this pc_rtx hack. */
929 decl
= stack_vars
[i
].decl
;
930 if ((TREE_CODE (decl
) == SSA_NAME
931 ? SA
.partition_to_pseudo
[var_to_partition (SA
.map
, decl
)]
932 : DECL_RTL (decl
)) != pc_rtx
)
935 /* Check the predicate to see whether this variable should be
936 allocated in this pass. */
937 if (pred
&& !pred (i
))
940 alignb
= stack_vars
[i
].alignb
;
941 if (alignb
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
943 if ((flag_sanitize
& SANITIZE_ADDRESS
) && pred
)
945 HOST_WIDE_INT prev_offset
= frame_offset
;
946 tree repr_decl
= NULL_TREE
;
949 = alloc_stack_frame_space (stack_vars
[i
].size
950 + ASAN_RED_ZONE_SIZE
,
951 MAX (alignb
, ASAN_RED_ZONE_SIZE
));
952 data
->asan_vec
.safe_push (prev_offset
);
953 data
->asan_vec
.safe_push (offset
+ stack_vars
[i
].size
);
954 /* Find best representative of the partition.
955 Prefer those with DECL_NAME, even better
956 satisfying asan_protect_stack_decl predicate. */
957 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
958 if (asan_protect_stack_decl (stack_vars
[j
].decl
)
959 && DECL_NAME (stack_vars
[j
].decl
))
961 repr_decl
= stack_vars
[j
].decl
;
964 else if (repr_decl
== NULL_TREE
965 && DECL_P (stack_vars
[j
].decl
)
966 && DECL_NAME (stack_vars
[j
].decl
))
967 repr_decl
= stack_vars
[j
].decl
;
968 if (repr_decl
== NULL_TREE
)
969 repr_decl
= stack_vars
[i
].decl
;
970 data
->asan_decl_vec
.safe_push (repr_decl
);
973 offset
= alloc_stack_frame_space (stack_vars
[i
].size
, alignb
);
974 base
= virtual_stack_vars_rtx
;
975 base_align
= crtl
->max_used_stack_slot_alignment
;
979 /* Large alignment is only processed in the last pass. */
982 gcc_assert (large_base
!= NULL
);
984 large_alloc
+= alignb
- 1;
985 large_alloc
&= -(HOST_WIDE_INT
)alignb
;
986 offset
= large_alloc
;
987 large_alloc
+= stack_vars
[i
].size
;
990 base_align
= large_align
;
993 /* Create rtl for each variable based on their location within the
995 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
997 expand_one_stack_var_at (stack_vars
[j
].decl
,
1003 gcc_assert (large_alloc
== large_size
);
1006 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1007 static HOST_WIDE_INT
1008 account_stack_vars (void)
1010 size_t si
, j
, i
, n
= stack_vars_num
;
1011 HOST_WIDE_INT size
= 0;
1013 for (si
= 0; si
< n
; ++si
)
1015 i
= stack_vars_sorted
[si
];
1017 /* Skip variables that aren't partition representatives, for now. */
1018 if (stack_vars
[i
].representative
!= i
)
1021 size
+= stack_vars
[i
].size
;
1022 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1023 set_rtl (stack_vars
[j
].decl
, NULL
);
1028 /* A subroutine of expand_one_var. Called to immediately assign rtl
1029 to a variable to be allocated in the stack frame. */
1032 expand_one_stack_var (tree var
)
1034 HOST_WIDE_INT size
, offset
;
1035 unsigned byte_align
;
1037 size
= tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var
)), 1);
1038 byte_align
= align_local_variable (SSAVAR (var
));
1040 /* We handle highly aligned variables in expand_stack_vars. */
1041 gcc_assert (byte_align
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
);
1043 offset
= alloc_stack_frame_space (size
, byte_align
);
1045 expand_one_stack_var_at (var
, virtual_stack_vars_rtx
,
1046 crtl
->max_used_stack_slot_alignment
, offset
);
1049 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1050 that will reside in a hard register. */
1053 expand_one_hard_reg_var (tree var
)
1055 rest_of_decl_compilation (var
, 0, 0);
1058 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1059 that will reside in a pseudo register. */
1062 expand_one_register_var (tree var
)
1064 tree decl
= SSAVAR (var
);
1065 tree type
= TREE_TYPE (decl
);
1066 enum machine_mode reg_mode
= promote_decl_mode (decl
, NULL
);
1067 rtx x
= gen_reg_rtx (reg_mode
);
1071 /* Note if the object is a user variable. */
1072 if (!DECL_ARTIFICIAL (decl
))
1075 if (POINTER_TYPE_P (type
))
1076 mark_reg_pointer (x
, get_pointer_alignment (var
));
1079 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1080 has some associated error, e.g. its type is error-mark. We just need
1081 to pick something that won't crash the rest of the compiler. */
1084 expand_one_error_var (tree var
)
1086 enum machine_mode mode
= DECL_MODE (var
);
1089 if (mode
== BLKmode
)
1090 x
= gen_rtx_MEM (BLKmode
, const0_rtx
);
1091 else if (mode
== VOIDmode
)
1094 x
= gen_reg_rtx (mode
);
1096 SET_DECL_RTL (var
, x
);
1099 /* A subroutine of expand_one_var. VAR is a variable that will be
1100 allocated to the local stack frame. Return true if we wish to
1101 add VAR to STACK_VARS so that it will be coalesced with other
1102 variables. Return false to allocate VAR immediately.
1104 This function is used to reduce the number of variables considered
1105 for coalescing, which reduces the size of the quadratic problem. */
1108 defer_stack_allocation (tree var
, bool toplevel
)
1110 /* If stack protection is enabled, *all* stack variables must be deferred,
1111 so that we can re-order the strings to the top of the frame.
1112 Similarly for Address Sanitizer. */
1113 if (flag_stack_protect
|| (flag_sanitize
& SANITIZE_ADDRESS
))
1116 /* We handle "large" alignment via dynamic allocation. We want to handle
1117 this extra complication in only one place, so defer them. */
1118 if (DECL_ALIGN (var
) > MAX_SUPPORTED_STACK_ALIGNMENT
)
1121 /* Variables in the outermost scope automatically conflict with
1122 every other variable. The only reason to want to defer them
1123 at all is that, after sorting, we can more efficiently pack
1124 small variables in the stack frame. Continue to defer at -O2. */
1125 if (toplevel
&& optimize
< 2)
1128 /* Without optimization, *most* variables are allocated from the
1129 stack, which makes the quadratic problem large exactly when we
1130 want compilation to proceed as quickly as possible. On the
1131 other hand, we don't want the function's stack frame size to
1132 get completely out of hand. So we avoid adding scalars and
1133 "small" aggregates to the list at all. */
1135 && (tree_low_cst (DECL_SIZE_UNIT (var
), 1)
1136 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING
)))
1142 /* A subroutine of expand_used_vars. Expand one variable according to
1143 its flavor. Variables to be placed on the stack are not actually
1144 expanded yet, merely recorded.
1145 When REALLY_EXPAND is false, only add stack values to be allocated.
1146 Return stack usage this variable is supposed to take.
1149 static HOST_WIDE_INT
1150 expand_one_var (tree var
, bool toplevel
, bool really_expand
)
1152 unsigned int align
= BITS_PER_UNIT
;
1157 if (TREE_TYPE (var
) != error_mark_node
&& TREE_CODE (var
) == VAR_DECL
)
1159 /* Because we don't know if VAR will be in register or on stack,
1160 we conservatively assume it will be on stack even if VAR is
1161 eventually put into register after RA pass. For non-automatic
1162 variables, which won't be on stack, we collect alignment of
1163 type and ignore user specified alignment. */
1164 if (TREE_STATIC (var
) || DECL_EXTERNAL (var
))
1165 align
= MINIMUM_ALIGNMENT (TREE_TYPE (var
),
1166 TYPE_MODE (TREE_TYPE (var
)),
1167 TYPE_ALIGN (TREE_TYPE (var
)));
1168 else if (DECL_HAS_VALUE_EXPR_P (var
)
1169 || (DECL_RTL_SET_P (var
) && MEM_P (DECL_RTL (var
))))
1170 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1171 or variables which were assigned a stack slot already by
1172 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1173 changed from the offset chosen to it. */
1174 align
= crtl
->stack_alignment_estimated
;
1176 align
= MINIMUM_ALIGNMENT (var
, DECL_MODE (var
), DECL_ALIGN (var
));
1178 /* If the variable alignment is very large we'll dynamicaly allocate
1179 it, which means that in-frame portion is just a pointer. */
1180 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1181 align
= POINTER_SIZE
;
1184 if (SUPPORTS_STACK_ALIGNMENT
1185 && crtl
->stack_alignment_estimated
< align
)
1187 /* stack_alignment_estimated shouldn't change after stack
1188 realign decision made */
1189 gcc_assert (!crtl
->stack_realign_processed
);
1190 crtl
->stack_alignment_estimated
= align
;
1193 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1194 So here we only make sure stack_alignment_needed >= align. */
1195 if (crtl
->stack_alignment_needed
< align
)
1196 crtl
->stack_alignment_needed
= align
;
1197 if (crtl
->max_used_stack_slot_alignment
< align
)
1198 crtl
->max_used_stack_slot_alignment
= align
;
1200 if (TREE_CODE (origvar
) == SSA_NAME
)
1202 gcc_assert (TREE_CODE (var
) != VAR_DECL
1203 || (!DECL_EXTERNAL (var
)
1204 && !DECL_HAS_VALUE_EXPR_P (var
)
1205 && !TREE_STATIC (var
)
1206 && TREE_TYPE (var
) != error_mark_node
1207 && !DECL_HARD_REGISTER (var
)
1210 if (TREE_CODE (var
) != VAR_DECL
&& TREE_CODE (origvar
) != SSA_NAME
)
1212 else if (DECL_EXTERNAL (var
))
1214 else if (DECL_HAS_VALUE_EXPR_P (var
))
1216 else if (TREE_STATIC (var
))
1218 else if (TREE_CODE (origvar
) != SSA_NAME
&& DECL_RTL_SET_P (var
))
1220 else if (TREE_TYPE (var
) == error_mark_node
)
1223 expand_one_error_var (var
);
1225 else if (TREE_CODE (var
) == VAR_DECL
&& DECL_HARD_REGISTER (var
))
1228 expand_one_hard_reg_var (var
);
1230 else if (use_register_for_decl (var
))
1233 expand_one_register_var (origvar
);
1235 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var
)))
1237 /* Reject variables which cover more than half of the address-space. */
1240 error ("size of variable %q+D is too large", var
);
1241 expand_one_error_var (var
);
1244 else if (defer_stack_allocation (var
, toplevel
))
1245 add_stack_var (origvar
);
1249 expand_one_stack_var (origvar
);
1250 return tree_low_cst (DECL_SIZE_UNIT (var
), 1);
1255 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1256 expanding variables. Those variables that can be put into registers
1257 are allocated pseudos; those that can't are put on the stack.
1259 TOPLEVEL is true if this is the outermost BLOCK. */
1262 expand_used_vars_for_block (tree block
, bool toplevel
)
1266 /* Expand all variables at this level. */
1267 for (t
= BLOCK_VARS (block
); t
; t
= DECL_CHAIN (t
))
1269 && ((TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != RESULT_DECL
)
1270 || !DECL_NONSHAREABLE (t
)))
1271 expand_one_var (t
, toplevel
, true);
1273 /* Expand all variables at containing levels. */
1274 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
1275 expand_used_vars_for_block (t
, false);
1278 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1279 and clear TREE_USED on all local variables. */
1282 clear_tree_used (tree block
)
1286 for (t
= BLOCK_VARS (block
); t
; t
= DECL_CHAIN (t
))
1287 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1288 if ((TREE_CODE (t
) != VAR_DECL
&& TREE_CODE (t
) != RESULT_DECL
)
1289 || !DECL_NONSHAREABLE (t
))
1292 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
1293 clear_tree_used (t
);
/* Stack-protector levels, i.e. the values of flag_stack_protect.
   NOTE(review): the enum header line and the SPCT_FLAG_ALL (= 2)
   enumerator appear to have been lost when this file was extracted --
   confirm against the upstream cfgexpand.c before relying on this.  */
1297 SPCT_FLAG_DEFAULT
= 1,
1299 SPCT_FLAG_STRONG
= 3
1302 /* Examine TYPE and determine a bit mask of the following features. */
1304 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1305 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1306 #define SPCT_HAS_ARRAY 4
1307 #define SPCT_HAS_AGGREGATE 8
1310 stack_protect_classify_type (tree type
)
1312 unsigned int ret
= 0;
1315 switch (TREE_CODE (type
))
1318 t
= TYPE_MAIN_VARIANT (TREE_TYPE (type
));
1319 if (t
== char_type_node
1320 || t
== signed_char_type_node
1321 || t
== unsigned_char_type_node
)
1323 unsigned HOST_WIDE_INT max
= PARAM_VALUE (PARAM_SSP_BUFFER_SIZE
);
1324 unsigned HOST_WIDE_INT len
;
1326 if (!TYPE_SIZE_UNIT (type
)
1327 || !host_integerp (TYPE_SIZE_UNIT (type
), 1))
1330 len
= tree_low_cst (TYPE_SIZE_UNIT (type
), 1);
1333 ret
= SPCT_HAS_SMALL_CHAR_ARRAY
| SPCT_HAS_ARRAY
;
1335 ret
= SPCT_HAS_LARGE_CHAR_ARRAY
| SPCT_HAS_ARRAY
;
1338 ret
= SPCT_HAS_ARRAY
;
1342 case QUAL_UNION_TYPE
:
1344 ret
= SPCT_HAS_AGGREGATE
;
1345 for (t
= TYPE_FIELDS (type
); t
; t
= TREE_CHAIN (t
))
1346 if (TREE_CODE (t
) == FIELD_DECL
)
1347 ret
|= stack_protect_classify_type (TREE_TYPE (t
));
1357 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1358 part of the local stack frame. Remember if we ever return nonzero for
1359 any variable in this function. The return value is the phase number in
1360 which the variable should be allocated. */
1363 stack_protect_decl_phase (tree decl
)
1365 unsigned int bits
= stack_protect_classify_type (TREE_TYPE (decl
));
1368 if (bits
& SPCT_HAS_SMALL_CHAR_ARRAY
)
1369 has_short_buffer
= true;
1371 if (flag_stack_protect
== SPCT_FLAG_ALL
1372 || flag_stack_protect
== SPCT_FLAG_STRONG
)
1374 if ((bits
& (SPCT_HAS_SMALL_CHAR_ARRAY
| SPCT_HAS_LARGE_CHAR_ARRAY
))
1375 && !(bits
& SPCT_HAS_AGGREGATE
))
1377 else if (bits
& SPCT_HAS_ARRAY
)
1381 ret
= (bits
& SPCT_HAS_LARGE_CHAR_ARRAY
) != 0;
1384 has_protected_decls
= true;
1389 /* Two helper routines that check for phase 1 and phase 2. These are used
1390 as callbacks for expand_stack_vars. */
1393 stack_protect_decl_phase_1 (size_t i
)
1395 return stack_protect_decl_phase (stack_vars
[i
].decl
) == 1;
1399 stack_protect_decl_phase_2 (size_t i
)
1401 return stack_protect_decl_phase (stack_vars
[i
].decl
) == 2;
1404 /* And helper function that checks for asan phase (with stack protector
1405 it is phase 3). This is used as callback for expand_stack_vars.
1406 Returns true if any of the vars in the partition need to be protected. */
1409 asan_decl_phase_3 (size_t i
)
1413 if (asan_protect_stack_decl (stack_vars
[i
].decl
))
1415 i
= stack_vars
[i
].next
;
1420 /* Ensure that variables in different stack protection phases conflict
1421 so that they are not merged and share the same stack slot. */
1424 add_stack_protection_conflicts (void)
1426 size_t i
, j
, n
= stack_vars_num
;
1427 unsigned char *phase
;
1429 phase
= XNEWVEC (unsigned char, n
);
1430 for (i
= 0; i
< n
; ++i
)
1431 phase
[i
] = stack_protect_decl_phase (stack_vars
[i
].decl
);
1433 for (i
= 0; i
< n
; ++i
)
1435 unsigned char ph_i
= phase
[i
];
1436 for (j
= i
+ 1; j
< n
; ++j
)
1437 if (ph_i
!= phase
[j
])
1438 add_stack_var_conflict (i
, j
);
1444 /* Create a decl for the guard at the top of the stack frame. */
1447 create_stack_guard (void)
1449 tree guard
= build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
1450 VAR_DECL
, NULL
, ptr_type_node
);
1451 TREE_THIS_VOLATILE (guard
) = 1;
1452 TREE_USED (guard
) = 1;
1453 expand_one_stack_var (guard
);
1454 crtl
->stack_protect_guard
= guard
;
1457 /* Prepare for expanding variables. */
1459 init_vars_expansion (void)
1461 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1462 bitmap_obstack_initialize (&stack_var_bitmap_obstack
);
1464 /* A map from decl to stack partition. */
1465 decl_to_stack_part
= pointer_map_create ();
1467 /* Initialize local stack smashing state. */
1468 has_protected_decls
= false;
1469 has_short_buffer
= false;
1472 /* Free up stack variable graph data. */
1474 fini_vars_expansion (void)
1476 bitmap_obstack_release (&stack_var_bitmap_obstack
);
1478 XDELETEVEC (stack_vars
);
1479 if (stack_vars_sorted
)
1480 XDELETEVEC (stack_vars_sorted
);
1482 stack_vars_sorted
= NULL
;
1483 stack_vars_alloc
= stack_vars_num
= 0;
1484 pointer_map_destroy (decl_to_stack_part
);
1485 decl_to_stack_part
= NULL
;
1488 /* Make a fair guess for the size of the stack frame of the function
1489 in NODE. This doesn't have to be exact, the result is only used in
1490 the inline heuristics. So we don't want to run the full stack var
1491 packing algorithm (which is quadratic in the number of stack vars).
1492 Instead, we calculate the total size of all stack vars. This turns
1493 out to be a pretty fair estimate -- packing of stack vars doesn't
1494 happen very often. */
1497 estimated_stack_frame_size (struct cgraph_node
*node
)
1499 HOST_WIDE_INT size
= 0;
1502 struct function
*fn
= DECL_STRUCT_FUNCTION (node
->symbol
.decl
);
1506 init_vars_expansion ();
1508 FOR_EACH_LOCAL_DECL (fn
, i
, var
)
1509 if (auto_var_in_fn_p (var
, fn
->decl
))
1510 size
+= expand_one_var (var
, true, false);
1512 if (stack_vars_num
> 0)
1514 /* Fake sorting the stack vars for account_stack_vars (). */
1515 stack_vars_sorted
= XNEWVEC (size_t, stack_vars_num
);
1516 for (i
= 0; i
< stack_vars_num
; ++i
)
1517 stack_vars_sorted
[i
] = i
;
1518 size
+= account_stack_vars ();
1521 fini_vars_expansion ();
1526 /* Helper routine to check if a record or union contains an array field. */
1529 record_or_union_type_has_array_p (const_tree tree_type
)
1531 tree fields
= TYPE_FIELDS (tree_type
);
1534 for (f
= fields
; f
; f
= DECL_CHAIN (f
))
1535 if (TREE_CODE (f
) == FIELD_DECL
)
1537 tree field_type
= TREE_TYPE (f
);
1538 if (RECORD_OR_UNION_TYPE_P (field_type
)
1539 && record_or_union_type_has_array_p (field_type
))
1541 if (TREE_CODE (field_type
) == ARRAY_TYPE
)
1547 /* Expand all variables used in the function. */
1550 expand_used_vars (void)
1552 tree var
, outer_block
= DECL_INITIAL (current_function_decl
);
1553 vec
<tree
> maybe_local_decls
= vNULL
;
1554 rtx var_end_seq
= NULL_RTX
;
1555 struct pointer_map_t
*ssa_name_decls
;
1558 bool gen_stack_protect_signal
= false;
1560 /* Compute the phase of the stack frame for this function. */
1562 int align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
1563 int off
= STARTING_FRAME_OFFSET
% align
;
1564 frame_phase
= off
? align
- off
: 0;
1567 /* Set TREE_USED on all variables in the local_decls. */
1568 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
1569 TREE_USED (var
) = 1;
1570 /* Clear TREE_USED on all variables associated with a block scope. */
1571 clear_tree_used (DECL_INITIAL (current_function_decl
));
1573 init_vars_expansion ();
1575 ssa_name_decls
= pointer_map_create ();
1576 for (i
= 0; i
< SA
.map
->num_partitions
; i
++)
1578 tree var
= partition_to_var (SA
.map
, i
);
1580 gcc_assert (!virtual_operand_p (var
));
1582 /* Assign decls to each SSA name partition, share decls for partitions
1583 we could have coalesced (those with the same type). */
1584 if (SSA_NAME_VAR (var
) == NULL_TREE
)
1586 void **slot
= pointer_map_insert (ssa_name_decls
, TREE_TYPE (var
));
1588 *slot
= (void *) create_tmp_reg (TREE_TYPE (var
), NULL
);
1589 replace_ssa_name_symbol (var
, (tree
) *slot
);
1592 if (TREE_CODE (SSA_NAME_VAR (var
)) == VAR_DECL
)
1593 expand_one_var (var
, true, true);
1596 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1597 contain the default def (representing the parm or result itself)
1598 we don't do anything here. But those which don't contain the
1599 default def (representing a temporary based on the parm/result)
1600 we need to allocate space just like for normal VAR_DECLs. */
1601 if (!bitmap_bit_p (SA
.partition_has_default_def
, i
))
1603 expand_one_var (var
, true, true);
1604 gcc_assert (SA
.partition_to_pseudo
[i
]);
1608 pointer_map_destroy (ssa_name_decls
);
1610 if (flag_stack_protect
== SPCT_FLAG_STRONG
)
1611 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
1612 if (!is_global_var (var
))
1614 tree var_type
= TREE_TYPE (var
);
1615 /* Examine local referenced variables that have their addresses taken,
1616 contain an array, or are arrays. */
1617 if (TREE_CODE (var
) == VAR_DECL
1618 && (TREE_CODE (var_type
) == ARRAY_TYPE
1619 || TREE_ADDRESSABLE (var
)
1620 || (RECORD_OR_UNION_TYPE_P (var_type
)
1621 && record_or_union_type_has_array_p (var_type
))))
1623 gen_stack_protect_signal
= true;
1628 /* At this point all variables on the local_decls with TREE_USED
1629 set are not associated with any block scope. Lay them out. */
1631 len
= vec_safe_length (cfun
->local_decls
);
1632 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
1634 bool expand_now
= false;
1636 /* Expanded above already. */
1637 if (is_gimple_reg (var
))
1639 TREE_USED (var
) = 0;
1642 /* We didn't set a block for static or extern because it's hard
1643 to tell the difference between a global variable (re)declared
1644 in a local scope, and one that's really declared there to
1645 begin with. And it doesn't really matter much, since we're
1646 not giving them stack space. Expand them now. */
1647 else if (TREE_STATIC (var
) || DECL_EXTERNAL (var
))
1650 /* If the variable is not associated with any block, then it
1651 was created by the optimizers, and could be live anywhere
1653 else if (TREE_USED (var
))
1656 /* Finally, mark all variables on the list as used. We'll use
1657 this in a moment when we expand those associated with scopes. */
1658 TREE_USED (var
) = 1;
1661 expand_one_var (var
, true, true);
1664 if (DECL_ARTIFICIAL (var
) && !DECL_IGNORED_P (var
))
1666 rtx rtl
= DECL_RTL_IF_SET (var
);
1668 /* Keep artificial non-ignored vars in cfun->local_decls
1669 chain until instantiate_decls. */
1670 if (rtl
&& (MEM_P (rtl
) || GET_CODE (rtl
) == CONCAT
))
1671 add_local_decl (cfun
, var
);
1672 else if (rtl
== NULL_RTX
)
1673 /* If rtl isn't set yet, which can happen e.g. with
1674 -fstack-protector, retry before returning from this
1676 maybe_local_decls
.safe_push (var
);
1680 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1682 +-----------------+-----------------+
1683 | ...processed... | ...duplicates...|
1684 +-----------------+-----------------+
1686 +-- LEN points here.
1688 We just want the duplicates, as those are the artificial
1689 non-ignored vars that we want to keep until instantiate_decls.
1690 Move them down and truncate the array. */
1691 if (!vec_safe_is_empty (cfun
->local_decls
))
1692 cfun
->local_decls
->block_remove (0, len
);
1694 /* At this point, all variables within the block tree with TREE_USED
1695 set are actually used by the optimized function. Lay them out. */
1696 expand_used_vars_for_block (outer_block
, true);
1698 if (stack_vars_num
> 0)
1700 add_scope_conflicts ();
1702 /* If stack protection is enabled, we don't share space between
1703 vulnerable data and non-vulnerable data. */
1704 if (flag_stack_protect
)
1705 add_stack_protection_conflicts ();
1707 /* Now that we have collected all stack variables, and have computed a
1708 minimal interference graph, attempt to save some stack space. */
1709 partition_stack_vars ();
1711 dump_stack_var_partition ();
1714 switch (flag_stack_protect
)
1717 create_stack_guard ();
1720 case SPCT_FLAG_STRONG
:
1721 if (gen_stack_protect_signal
1722 || cfun
->calls_alloca
|| has_protected_decls
)
1723 create_stack_guard ();
1726 case SPCT_FLAG_DEFAULT
:
1727 if (cfun
->calls_alloca
|| has_protected_decls
)
1728 create_stack_guard ();
1735 /* Assign rtl to each variable based on these partitions. */
1736 if (stack_vars_num
> 0)
1738 struct stack_vars_data data
;
1740 data
.asan_vec
= vNULL
;
1741 data
.asan_decl_vec
= vNULL
;
1743 /* Reorder decls to be protected by iterating over the variables
1744 array multiple times, and allocating out of each phase in turn. */
1745 /* ??? We could probably integrate this into the qsort we did
1746 earlier, such that we naturally see these variables first,
1747 and thus naturally allocate things in the right order. */
1748 if (has_protected_decls
)
1750 /* Phase 1 contains only character arrays. */
1751 expand_stack_vars (stack_protect_decl_phase_1
, &data
);
1753 /* Phase 2 contains other kinds of arrays. */
1754 if (flag_stack_protect
== 2)
1755 expand_stack_vars (stack_protect_decl_phase_2
, &data
);
1758 if (flag_sanitize
& SANITIZE_ADDRESS
)
1759 /* Phase 3, any partitions that need asan protection
1760 in addition to phase 1 and 2. */
1761 expand_stack_vars (asan_decl_phase_3
, &data
);
1763 if (!data
.asan_vec
.is_empty ())
1765 HOST_WIDE_INT prev_offset
= frame_offset
;
1766 HOST_WIDE_INT offset
1767 = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE
,
1768 ASAN_RED_ZONE_SIZE
);
1769 data
.asan_vec
.safe_push (prev_offset
);
1770 data
.asan_vec
.safe_push (offset
);
1773 = asan_emit_stack_protection (virtual_stack_vars_rtx
,
1774 data
.asan_vec
.address (),
1775 data
.asan_decl_vec
. address (),
1776 data
.asan_vec
.length ());
1779 expand_stack_vars (NULL
, &data
);
1781 data
.asan_vec
.release ();
1782 data
.asan_decl_vec
.release ();
1785 fini_vars_expansion ();
1787 /* If there were any artificial non-ignored vars without rtl
1788 found earlier, see if deferred stack allocation hasn't assigned
1790 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls
, i
, var
)
1792 rtx rtl
= DECL_RTL_IF_SET (var
);
1794 /* Keep artificial non-ignored vars in cfun->local_decls
1795 chain until instantiate_decls. */
1796 if (rtl
&& (MEM_P (rtl
) || GET_CODE (rtl
) == CONCAT
))
1797 add_local_decl (cfun
, var
);
1799 maybe_local_decls
.release ();
1801 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1802 if (STACK_ALIGNMENT_NEEDED
)
1804 HOST_WIDE_INT align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
1805 if (!FRAME_GROWS_DOWNWARD
)
1806 frame_offset
+= align
- 1;
1807 frame_offset
&= -align
;
1814 /* If we need to produce a detailed dump, print the tree representation
1815 for STMT to the dump file. SINCE is the last RTX after which the RTL
1816 generated for STMT should have been appended. */
1819 maybe_dump_rtl_for_gimple_stmt (gimple stmt
, rtx since
)
1821 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1823 fprintf (dump_file
, "\n;; ");
1824 print_gimple_stmt (dump_file
, stmt
, 0,
1825 TDF_SLIM
| (dump_flags
& TDF_LINENO
));
1826 fprintf (dump_file
, "\n");
1828 print_rtl (dump_file
, since
? NEXT_INSN (since
) : since
);
1832 /* Maps the blocks that do not contain tree labels to rtx labels. */
/* Lazily populated by label_rtx_for_bb: keys are basic_block pointers,
   values are code labels created with gen_label_rtx.  */
1834 static struct pointer_map_t
*lab_rtx_for_bb
;
1836 /* Returns the label_rtx expression for a label starting basic block BB. */
1839 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED
)
1841 gimple_stmt_iterator gsi
;
1846 if (bb
->flags
& BB_RTL
)
1847 return block_label (bb
);
1849 elt
= pointer_map_contains (lab_rtx_for_bb
, bb
);
1853 /* Find the tree label if it is present. */
1855 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1857 lab_stmt
= gsi_stmt (gsi
);
1858 if (gimple_code (lab_stmt
) != GIMPLE_LABEL
)
1861 lab
= gimple_label_label (lab_stmt
);
1862 if (DECL_NONLOCAL (lab
))
1865 return label_rtx (lab
);
1868 elt
= pointer_map_insert (lab_rtx_for_bb
, bb
);
1869 *elt
= gen_label_rtx ();
1874 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1875 of a basic block where we just expanded the conditional at the end,
1876 possibly clean up the CFG and instruction sequence. LAST is the
1877 last instruction before the just emitted jump sequence. */
1880 maybe_cleanup_end_of_block (edge e
, rtx last
)
1882 /* Special case: when jumpif decides that the condition is
1883 trivial it emits an unconditional jump (and the necessary
1884 barrier). But we still have two edges, the fallthru one is
1885 wrong. purge_dead_edges would clean this up later. Unfortunately
1886 we have to insert insns (and split edges) before
1887 find_many_sub_basic_blocks and hence before purge_dead_edges.
1888 But splitting edges might create new blocks which depend on the
1889 fact that if there are two edges there's no barrier. So the
1890 barrier would get lost and verify_flow_info would ICE. Instead
1891 of auditing all edge splitters to care for the barrier (which
1892 normally isn't there in a cleaned CFG), fix it here. */
1893 if (BARRIER_P (get_last_insn ()))
1897 /* Now, we have a single successor block, if we have insns to
1898 insert on the remaining edge we potentially will insert
1899 it at the end of this block (if the dest block isn't feasible)
1900 in order to avoid splitting the edge. This insertion will take
1901 place in front of the last jump. But we might have emitted
1902 multiple jumps (conditional and one unconditional) to the
1903 same destination. Inserting in front of the last one then
1904 is a problem. See PR 40021. We fix this by deleting all
1905 jumps except the last unconditional one. */
1906 insn
= PREV_INSN (get_last_insn ());
1907 /* Make sure we have an unconditional jump. Otherwise we're
1909 gcc_assert (JUMP_P (insn
) && !any_condjump_p (insn
));
1910 for (insn
= PREV_INSN (insn
); insn
!= last
;)
1912 insn
= PREV_INSN (insn
);
1913 if (JUMP_P (NEXT_INSN (insn
)))
1915 if (!any_condjump_p (NEXT_INSN (insn
)))
1917 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn
))));
1918 delete_insn (NEXT_INSN (NEXT_INSN (insn
)));
1920 delete_insn (NEXT_INSN (insn
));
1926 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1927 Returns a new basic block if we've terminated the current basic
1928 block and created a new one. */
1931 expand_gimple_cond (basic_block bb
, gimple stmt
)
1933 basic_block new_bb
, dest
;
1938 enum tree_code code
;
1941 code
= gimple_cond_code (stmt
);
1942 op0
= gimple_cond_lhs (stmt
);
1943 op1
= gimple_cond_rhs (stmt
);
1944 /* We're sometimes presented with such code:
1948 This would expand to two comparisons which then later might
1949 be cleaned up by combine. But some pattern matchers like if-conversion
1950 work better when there's only one compare, so make up for this
1951 here as special exception if TER would have made the same change. */
1953 && TREE_CODE (op0
) == SSA_NAME
1954 && TREE_CODE (TREE_TYPE (op0
)) == BOOLEAN_TYPE
1955 && TREE_CODE (op1
) == INTEGER_CST
1956 && ((gimple_cond_code (stmt
) == NE_EXPR
1957 && integer_zerop (op1
))
1958 || (gimple_cond_code (stmt
) == EQ_EXPR
1959 && integer_onep (op1
)))
1960 && bitmap_bit_p (SA
.values
, SSA_NAME_VERSION (op0
)))
1962 gimple second
= SSA_NAME_DEF_STMT (op0
);
1963 if (gimple_code (second
) == GIMPLE_ASSIGN
)
1965 enum tree_code code2
= gimple_assign_rhs_code (second
);
1966 if (TREE_CODE_CLASS (code2
) == tcc_comparison
)
1969 op0
= gimple_assign_rhs1 (second
);
1970 op1
= gimple_assign_rhs2 (second
);
1972 /* If jumps are cheap turn some more codes into
1974 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1976 if ((code2
== BIT_AND_EXPR
1977 && TYPE_PRECISION (TREE_TYPE (op0
)) == 1
1978 && TREE_CODE (gimple_assign_rhs2 (second
)) != INTEGER_CST
)
1979 || code2
== TRUTH_AND_EXPR
)
1981 code
= TRUTH_ANDIF_EXPR
;
1982 op0
= gimple_assign_rhs1 (second
);
1983 op1
= gimple_assign_rhs2 (second
);
1985 else if (code2
== BIT_IOR_EXPR
|| code2
== TRUTH_OR_EXPR
)
1987 code
= TRUTH_ORIF_EXPR
;
1988 op0
= gimple_assign_rhs1 (second
);
1989 op1
= gimple_assign_rhs2 (second
);
1995 last2
= last
= get_last_insn ();
1997 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
1998 set_curr_insn_location (gimple_location (stmt
));
2000 /* These flags have no purpose in RTL land. */
2001 true_edge
->flags
&= ~EDGE_TRUE_VALUE
;
2002 false_edge
->flags
&= ~EDGE_FALSE_VALUE
;
2004 /* We can either have a pure conditional jump with one fallthru edge or
2005 two-way jump that needs to be decomposed into two basic blocks. */
2006 if (false_edge
->dest
== bb
->next_bb
)
2008 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2009 true_edge
->probability
);
2010 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2011 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2012 set_curr_insn_location (true_edge
->goto_locus
);
2013 false_edge
->flags
|= EDGE_FALLTHRU
;
2014 maybe_cleanup_end_of_block (false_edge
, last
);
2017 if (true_edge
->dest
== bb
->next_bb
)
2019 jumpifnot_1 (code
, op0
, op1
, label_rtx_for_bb (false_edge
->dest
),
2020 false_edge
->probability
);
2021 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2022 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2023 set_curr_insn_location (false_edge
->goto_locus
);
2024 true_edge
->flags
|= EDGE_FALLTHRU
;
2025 maybe_cleanup_end_of_block (true_edge
, last
);
2029 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2030 true_edge
->probability
);
2031 last
= get_last_insn ();
2032 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2033 set_curr_insn_location (false_edge
->goto_locus
);
2034 emit_jump (label_rtx_for_bb (false_edge
->dest
));
2037 if (BARRIER_P (BB_END (bb
)))
2038 BB_END (bb
) = PREV_INSN (BB_END (bb
));
2039 update_bb_for_insn (bb
);
2041 new_bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
2042 dest
= false_edge
->dest
;
2043 redirect_edge_succ (false_edge
, new_bb
);
2044 false_edge
->flags
|= EDGE_FALLTHRU
;
2045 new_bb
->count
= false_edge
->count
;
2046 new_bb
->frequency
= EDGE_FREQUENCY (false_edge
);
2047 if (current_loops
&& bb
->loop_father
)
2048 add_bb_to_loop (new_bb
, bb
->loop_father
);
2049 new_edge
= make_edge (new_bb
, dest
, 0);
2050 new_edge
->probability
= REG_BR_PROB_BASE
;
2051 new_edge
->count
= new_bb
->count
;
2052 if (BARRIER_P (BB_END (new_bb
)))
2053 BB_END (new_bb
) = PREV_INSN (BB_END (new_bb
));
2054 update_bb_for_insn (new_bb
);
2056 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
2058 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2060 set_curr_insn_location (true_edge
->goto_locus
);
2061 true_edge
->goto_locus
= curr_insn_location ();
2067 /* Mark all calls that can have a transaction restart. */
2070 mark_transaction_restart_calls (gimple stmt
)
2072 struct tm_restart_node dummy
;
2075 if (!cfun
->gimple_df
->tm_restart
)
2079 slot
= htab_find_slot (cfun
->gimple_df
->tm_restart
, &dummy
, NO_INSERT
);
2082 struct tm_restart_node
*n
= (struct tm_restart_node
*) *slot
;
2083 tree list
= n
->label_or_list
;
2086 for (insn
= next_real_insn (get_last_insn ());
2088 insn
= next_real_insn (insn
))
2091 if (TREE_CODE (list
) == LABEL_DECL
)
2092 add_reg_note (insn
, REG_TM
, label_rtx (list
));
2094 for (; list
; list
= TREE_CHAIN (list
))
2095 add_reg_note (insn
, REG_TM
, label_rtx (TREE_VALUE (list
)));
2099 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2103 expand_call_stmt (gimple stmt
)
2105 tree exp
, decl
, lhs
;
2109 if (gimple_call_internal_p (stmt
))
2111 expand_internal_call (stmt
);
2115 exp
= build_vl_exp (CALL_EXPR
, gimple_call_num_args (stmt
) + 3);
2117 CALL_EXPR_FN (exp
) = gimple_call_fn (stmt
);
2118 decl
= gimple_call_fndecl (stmt
);
2119 builtin_p
= decl
&& DECL_BUILT_IN (decl
);
2121 /* If this is not a builtin function, the function type through which the
2122 call is made may be different from the type of the function. */
2125 = fold_convert (build_pointer_type (gimple_call_fntype (stmt
)),
2126 CALL_EXPR_FN (exp
));
2128 TREE_TYPE (exp
) = gimple_call_return_type (stmt
);
2129 CALL_EXPR_STATIC_CHAIN (exp
) = gimple_call_chain (stmt
);
2131 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
2133 tree arg
= gimple_call_arg (stmt
, i
);
2135 /* TER addresses into arguments of builtin functions so we have a
2136 chance to infer more correct alignment information. See PR39954. */
2138 && TREE_CODE (arg
) == SSA_NAME
2139 && (def
= get_gimple_for_ssa_name (arg
))
2140 && gimple_assign_rhs_code (def
) == ADDR_EXPR
)
2141 arg
= gimple_assign_rhs1 (def
);
2142 CALL_EXPR_ARG (exp
, i
) = arg
;
2145 if (gimple_has_side_effects (stmt
))
2146 TREE_SIDE_EFFECTS (exp
) = 1;
2148 if (gimple_call_nothrow_p (stmt
))
2149 TREE_NOTHROW (exp
) = 1;
2151 CALL_EXPR_TAILCALL (exp
) = gimple_call_tail_p (stmt
);
2152 CALL_EXPR_RETURN_SLOT_OPT (exp
) = gimple_call_return_slot_opt_p (stmt
);
2154 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
2155 && (DECL_FUNCTION_CODE (decl
) == BUILT_IN_ALLOCA
2156 || DECL_FUNCTION_CODE (decl
) == BUILT_IN_ALLOCA_WITH_ALIGN
))
2157 CALL_ALLOCA_FOR_VAR_P (exp
) = gimple_call_alloca_for_var_p (stmt
);
2159 CALL_FROM_THUNK_P (exp
) = gimple_call_from_thunk_p (stmt
);
2160 CALL_EXPR_VA_ARG_PACK (exp
) = gimple_call_va_arg_pack_p (stmt
);
2161 SET_EXPR_LOCATION (exp
, gimple_location (stmt
));
2163 /* Ensure RTL is created for debug args. */
2164 if (decl
&& DECL_HAS_DEBUG_ARGS_P (decl
))
2166 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (decl
);
2171 for (ix
= 1; (*debug_args
)->iterate (ix
, &dtemp
); ix
+= 2)
2173 gcc_assert (TREE_CODE (dtemp
) == DEBUG_EXPR_DECL
);
2174 expand_debug_expr (dtemp
);
2178 lhs
= gimple_call_lhs (stmt
);
2180 expand_assignment (lhs
, exp
, false);
2182 expand_expr_real_1 (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
, NULL
);
2184 mark_transaction_restart_calls (stmt
);
2187 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2188 STMT that doesn't require special handling for outgoing edges. That
2189 is no tailcalls and no GIMPLE_COND. */
2192 expand_gimple_stmt_1 (gimple stmt
)
2196 set_curr_insn_location (gimple_location (stmt
));
2198 switch (gimple_code (stmt
))
2201 op0
= gimple_goto_dest (stmt
);
2202 if (TREE_CODE (op0
) == LABEL_DECL
)
2205 expand_computed_goto (op0
);
2208 expand_label (gimple_label_label (stmt
));
2211 case GIMPLE_PREDICT
:
2217 expand_asm_stmt (stmt
);
2220 expand_call_stmt (stmt
);
2224 op0
= gimple_return_retval (stmt
);
2226 if (op0
&& op0
!= error_mark_node
)
2228 tree result
= DECL_RESULT (current_function_decl
);
2230 /* If we are not returning the current function's RESULT_DECL,
2231 build an assignment to it. */
2234 /* I believe that a function's RESULT_DECL is unique. */
2235 gcc_assert (TREE_CODE (op0
) != RESULT_DECL
);
2237 /* ??? We'd like to use simply expand_assignment here,
2238 but this fails if the value is of BLKmode but the return
2239 decl is a register. expand_return has special handling
2240 for this combination, which eventually should move
2241 to common code. See comments there. Until then, let's
2242 build a modify expression :-/ */
2243 op0
= build2 (MODIFY_EXPR
, TREE_TYPE (result
),
2248 expand_null_return ();
2250 expand_return (op0
);
2255 tree lhs
= gimple_assign_lhs (stmt
);
2257 /* Tree expand used to fiddle with |= and &= of two bitfield
2258 COMPONENT_REFs here. This can't happen with gimple, the LHS
2259 of binary assigns must be a gimple reg. */
2261 if (TREE_CODE (lhs
) != SSA_NAME
2262 || get_gimple_rhs_class (gimple_expr_code (stmt
))
2263 == GIMPLE_SINGLE_RHS
)
2265 tree rhs
= gimple_assign_rhs1 (stmt
);
2266 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt
))
2267 == GIMPLE_SINGLE_RHS
);
2268 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (rhs
))
2269 SET_EXPR_LOCATION (rhs
, gimple_location (stmt
));
2270 if (TREE_CLOBBER_P (rhs
))
2271 /* This is a clobber to mark the going out of scope for
2275 expand_assignment (lhs
, rhs
,
2276 gimple_assign_nontemporal_move_p (stmt
));
2281 bool nontemporal
= gimple_assign_nontemporal_move_p (stmt
);
2282 struct separate_ops ops
;
2283 bool promoted
= false;
2285 target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
2286 if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
2289 ops
.code
= gimple_assign_rhs_code (stmt
);
2290 ops
.type
= TREE_TYPE (lhs
);
2291 switch (get_gimple_rhs_class (gimple_expr_code (stmt
)))
2293 case GIMPLE_TERNARY_RHS
:
2294 ops
.op2
= gimple_assign_rhs3 (stmt
);
2296 case GIMPLE_BINARY_RHS
:
2297 ops
.op1
= gimple_assign_rhs2 (stmt
);
2299 case GIMPLE_UNARY_RHS
:
2300 ops
.op0
= gimple_assign_rhs1 (stmt
);
2305 ops
.location
= gimple_location (stmt
);
2307 /* If we want to use a nontemporal store, force the value to
2308 register first. If we store into a promoted register,
2309 don't directly expand to target. */
2310 temp
= nontemporal
|| promoted
? NULL_RTX
: target
;
2311 temp
= expand_expr_real_2 (&ops
, temp
, GET_MODE (target
),
2318 int unsignedp
= SUBREG_PROMOTED_UNSIGNED_P (target
);
2319 /* If TEMP is a VOIDmode constant, use convert_modes to make
2320 sure that we properly convert it. */
2321 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
2323 temp
= convert_modes (GET_MODE (target
),
2324 TYPE_MODE (ops
.type
),
2326 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
2327 GET_MODE (target
), temp
, unsignedp
);
2330 convert_move (SUBREG_REG (target
), temp
, unsignedp
);
2332 else if (nontemporal
&& emit_storent_insn (target
, temp
))
2336 temp
= force_operand (temp
, target
);
2338 emit_move_insn (target
, temp
);
2349 /* Expand one gimple statement STMT and return the last RTL instruction
2350 before any of the newly generated ones.
2352 In addition to generating the necessary RTL instructions this also
2353 sets REG_EH_REGION notes if necessary and sets the current source
2354 location for diagnostics. */
2357 expand_gimple_stmt (gimple stmt
)
2359 location_t saved_location
= input_location
;
2360 rtx last
= get_last_insn ();
2365 /* We need to save and restore the current source location so that errors
2366 discovered during expansion are emitted with the right location. But
2367 it would be better if the diagnostic routines used the source location
2368 embedded in the tree nodes rather than globals. */
2369 if (gimple_has_location (stmt
))
2370 input_location
= gimple_location (stmt
);
2372 expand_gimple_stmt_1 (stmt
);
2374 /* Free any temporaries used to evaluate this statement. */
2377 input_location
= saved_location
;
2379 /* Mark all insns that may trap. */
2380 lp_nr
= lookup_stmt_eh_lp (stmt
);
2384 for (insn
= next_real_insn (last
); insn
;
2385 insn
= next_real_insn (insn
))
2387 if (! find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
)
2388 /* If we want exceptions for non-call insns, any
2389 may_trap_p instruction may throw. */
2390 && GET_CODE (PATTERN (insn
)) != CLOBBER
2391 && GET_CODE (PATTERN (insn
)) != USE
2392 && insn_could_throw_p (insn
))
2393 make_reg_eh_region_note (insn
, 0, lp_nr
);
2400 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2401 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2402 generated a tail call (something that might be denied by the ABI
2403 rules governing the call; see calls.c).
2405 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2406 can still reach the rest of BB. The case here is __builtin_sqrt,
2407 where the NaN result goes through the external function (with a
2408 tailcall) and the normal result happens via a sqrt instruction. */
2411 expand_gimple_tailcall (basic_block bb
, gimple stmt
, bool *can_fallthru
)
2419 last2
= last
= expand_gimple_stmt (stmt
);
2421 for (last
= NEXT_INSN (last
); last
; last
= NEXT_INSN (last
))
2422 if (CALL_P (last
) && SIBLING_CALL_P (last
))
2425 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
2427 *can_fallthru
= true;
2431 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2432 Any instructions emitted here are about to be deleted. */
2433 do_pending_stack_adjust ();
2435 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2436 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2437 EH or abnormal edges, we shouldn't have created a tail call in
2438 the first place. So it seems to me we should just be removing
2439 all edges here, or redirecting the existing fallthru edge to
2445 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
2447 if (!(e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)))
2449 if (e
->dest
!= EXIT_BLOCK_PTR
)
2451 e
->dest
->count
-= e
->count
;
2452 e
->dest
->frequency
-= EDGE_FREQUENCY (e
);
2453 if (e
->dest
->count
< 0)
2455 if (e
->dest
->frequency
< 0)
2456 e
->dest
->frequency
= 0;
2459 probability
+= e
->probability
;
2466 /* This is somewhat ugly: the call_expr expander often emits instructions
2467 after the sibcall (to perform the function return). These confuse the
2468 find_many_sub_basic_blocks code, so we need to get rid of these. */
2469 last
= NEXT_INSN (last
);
2470 gcc_assert (BARRIER_P (last
));
2472 *can_fallthru
= false;
2473 while (NEXT_INSN (last
))
2475 /* For instance an sqrt builtin expander expands if with
2476 sibcall in the then and label for `else`. */
2477 if (LABEL_P (NEXT_INSN (last
)))
2479 *can_fallthru
= true;
2482 delete_insn (NEXT_INSN (last
));
2485 e
= make_edge (bb
, EXIT_BLOCK_PTR
, EDGE_ABNORMAL
| EDGE_SIBCALL
);
2486 e
->probability
+= probability
;
2489 update_bb_for_insn (bb
);
2491 if (NEXT_INSN (last
))
2493 bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
2496 if (BARRIER_P (last
))
2497 BB_END (bb
) = PREV_INSN (last
);
2500 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
2505 /* Return the difference between the floor and the truncated result of
2506 a signed division by OP1 with remainder MOD. */
2508 floor_sdiv_adjust (enum machine_mode mode
, rtx mod
, rtx op1
)
2510 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2511 return gen_rtx_IF_THEN_ELSE
2512 (mode
, gen_rtx_NE (BImode
, mod
, const0_rtx
),
2513 gen_rtx_IF_THEN_ELSE
2514 (mode
, gen_rtx_LT (BImode
,
2515 gen_rtx_DIV (mode
, op1
, mod
),
2517 constm1_rtx
, const0_rtx
),
2521 /* Return the difference between the ceil and the truncated result of
2522 a signed division by OP1 with remainder MOD. */
2524 ceil_sdiv_adjust (enum machine_mode mode
, rtx mod
, rtx op1
)
2526 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2527 return gen_rtx_IF_THEN_ELSE
2528 (mode
, gen_rtx_NE (BImode
, mod
, const0_rtx
),
2529 gen_rtx_IF_THEN_ELSE
2530 (mode
, gen_rtx_GT (BImode
,
2531 gen_rtx_DIV (mode
, op1
, mod
),
2533 const1_rtx
, const0_rtx
),
2537 /* Return the difference between the ceil and the truncated result of
2538 an unsigned division by OP1 with remainder MOD. */
2540 ceil_udiv_adjust (enum machine_mode mode
, rtx mod
, rtx op1 ATTRIBUTE_UNUSED
)
2542 /* (mod != 0 ? 1 : 0) */
2543 return gen_rtx_IF_THEN_ELSE
2544 (mode
, gen_rtx_NE (BImode
, mod
, const0_rtx
),
2545 const1_rtx
, const0_rtx
);
2548 /* Return the difference between the rounded and the truncated result
2549 of a signed division by OP1 with remainder MOD. Halfway cases are
2550 rounded away from zero, rather than to the nearest even number. */
2552 round_sdiv_adjust (enum machine_mode mode
, rtx mod
, rtx op1
)
2554 /* (abs (mod) >= abs (op1) - abs (mod)
2555 ? (op1 / mod > 0 ? 1 : -1)
2557 return gen_rtx_IF_THEN_ELSE
2558 (mode
, gen_rtx_GE (BImode
, gen_rtx_ABS (mode
, mod
),
2559 gen_rtx_MINUS (mode
,
2560 gen_rtx_ABS (mode
, op1
),
2561 gen_rtx_ABS (mode
, mod
))),
2562 gen_rtx_IF_THEN_ELSE
2563 (mode
, gen_rtx_GT (BImode
,
2564 gen_rtx_DIV (mode
, op1
, mod
),
2566 const1_rtx
, constm1_rtx
),
2570 /* Return the difference between the rounded and the truncated result
2571 of a unsigned division by OP1 with remainder MOD. Halfway cases
2572 are rounded away from zero, rather than to the nearest even
2575 round_udiv_adjust (enum machine_mode mode
, rtx mod
, rtx op1
)
2577 /* (mod >= op1 - mod ? 1 : 0) */
2578 return gen_rtx_IF_THEN_ELSE
2579 (mode
, gen_rtx_GE (BImode
, mod
,
2580 gen_rtx_MINUS (mode
, op1
, mod
)),
2581 const1_rtx
, const0_rtx
);
2584 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2588 convert_debug_memory_address (enum machine_mode mode
, rtx x
,
2591 enum machine_mode xmode
= GET_MODE (x
);
2593 #ifndef POINTERS_EXTEND_UNSIGNED
2594 gcc_assert (mode
== Pmode
2595 || mode
== targetm
.addr_space
.address_mode (as
));
2596 gcc_assert (xmode
== mode
|| xmode
== VOIDmode
);
2600 gcc_assert (targetm
.addr_space
.valid_pointer_mode (mode
, as
));
2602 if (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
)
2605 if (GET_MODE_PRECISION (mode
) < GET_MODE_PRECISION (xmode
))
2606 x
= simplify_gen_subreg (mode
, x
, xmode
,
2607 subreg_lowpart_offset
2609 else if (POINTERS_EXTEND_UNSIGNED
> 0)
2610 x
= gen_rtx_ZERO_EXTEND (mode
, x
);
2611 else if (!POINTERS_EXTEND_UNSIGNED
)
2612 x
= gen_rtx_SIGN_EXTEND (mode
, x
);
2615 switch (GET_CODE (x
))
2618 if ((SUBREG_PROMOTED_VAR_P (x
)
2619 || (REG_P (SUBREG_REG (x
)) && REG_POINTER (SUBREG_REG (x
)))
2620 || (GET_CODE (SUBREG_REG (x
)) == PLUS
2621 && REG_P (XEXP (SUBREG_REG (x
), 0))
2622 && REG_POINTER (XEXP (SUBREG_REG (x
), 0))
2623 && CONST_INT_P (XEXP (SUBREG_REG (x
), 1))))
2624 && GET_MODE (SUBREG_REG (x
)) == mode
)
2625 return SUBREG_REG (x
);
2628 temp
= gen_rtx_LABEL_REF (mode
, XEXP (x
, 0));
2629 LABEL_REF_NONLOCAL_P (temp
) = LABEL_REF_NONLOCAL_P (x
);
2632 temp
= shallow_copy_rtx (x
);
2633 PUT_MODE (temp
, mode
);
2636 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
2638 temp
= gen_rtx_CONST (mode
, temp
);
2642 if (CONST_INT_P (XEXP (x
, 1)))
2644 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
2646 return gen_rtx_fmt_ee (GET_CODE (x
), mode
, temp
, XEXP (x
, 1));
2652 /* Don't know how to express ptr_extend as operation in debug info. */
2655 #endif /* POINTERS_EXTEND_UNSIGNED */
2660 /* Return an RTX equivalent to the value of the parameter DECL. */
2663 expand_debug_parm_decl (tree decl
)
2665 rtx incoming
= DECL_INCOMING_RTL (decl
);
2668 && GET_MODE (incoming
) != BLKmode
2669 && ((REG_P (incoming
) && HARD_REGISTER_P (incoming
))
2670 || (MEM_P (incoming
)
2671 && REG_P (XEXP (incoming
, 0))
2672 && HARD_REGISTER_P (XEXP (incoming
, 0)))))
2674 rtx rtl
= gen_rtx_ENTRY_VALUE (GET_MODE (incoming
));
2676 #ifdef HAVE_window_save
2677 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2678 If the target machine has an explicit window save instruction, the
2679 actual entry value is the corresponding OUTGOING_REGNO instead. */
2680 if (REG_P (incoming
)
2681 && OUTGOING_REGNO (REGNO (incoming
)) != REGNO (incoming
))
2683 = gen_rtx_REG_offset (incoming
, GET_MODE (incoming
),
2684 OUTGOING_REGNO (REGNO (incoming
)), 0);
2685 else if (MEM_P (incoming
))
2687 rtx reg
= XEXP (incoming
, 0);
2688 if (OUTGOING_REGNO (REGNO (reg
)) != REGNO (reg
))
2690 reg
= gen_raw_REG (GET_MODE (reg
), OUTGOING_REGNO (REGNO (reg
)));
2691 incoming
= replace_equiv_address_nv (incoming
, reg
);
2694 incoming
= copy_rtx (incoming
);
2698 ENTRY_VALUE_EXP (rtl
) = incoming
;
2703 && GET_MODE (incoming
) != BLKmode
2704 && !TREE_ADDRESSABLE (decl
)
2706 && (XEXP (incoming
, 0) == virtual_incoming_args_rtx
2707 || (GET_CODE (XEXP (incoming
, 0)) == PLUS
2708 && XEXP (XEXP (incoming
, 0), 0) == virtual_incoming_args_rtx
2709 && CONST_INT_P (XEXP (XEXP (incoming
, 0), 1)))))
2710 return copy_rtx (incoming
);
2715 /* Return an RTX equivalent to the value of the tree expression EXP. */
2718 expand_debug_expr (tree exp
)
2720 rtx op0
= NULL_RTX
, op1
= NULL_RTX
, op2
= NULL_RTX
;
2721 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
2722 enum machine_mode inner_mode
= VOIDmode
;
2723 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
2726 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
2728 case tcc_expression
:
2729 switch (TREE_CODE (exp
))
2733 case WIDEN_MULT_PLUS_EXPR
:
2734 case WIDEN_MULT_MINUS_EXPR
:
2738 case TRUTH_ANDIF_EXPR
:
2739 case TRUTH_ORIF_EXPR
:
2740 case TRUTH_AND_EXPR
:
2742 case TRUTH_XOR_EXPR
:
2745 case TRUTH_NOT_EXPR
:
2754 op2
= expand_debug_expr (TREE_OPERAND (exp
, 2));
2761 case tcc_comparison
:
2762 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
2769 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
2770 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
2780 case tcc_exceptional
:
2781 case tcc_declaration
:
2787 switch (TREE_CODE (exp
))
2790 if (!lookup_constant_def (exp
))
2792 if (strlen (TREE_STRING_POINTER (exp
)) + 1
2793 != (size_t) TREE_STRING_LENGTH (exp
))
2795 op0
= gen_rtx_CONST_STRING (Pmode
, TREE_STRING_POINTER (exp
));
2796 op0
= gen_rtx_MEM (BLKmode
, op0
);
2797 set_mem_attributes (op0
, exp
, 0);
2800 /* Fall through... */
2805 op0
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_INITIALIZER
);
2809 gcc_assert (COMPLEX_MODE_P (mode
));
2810 op0
= expand_debug_expr (TREE_REALPART (exp
));
2811 op1
= expand_debug_expr (TREE_IMAGPART (exp
));
2812 return gen_rtx_CONCAT (mode
, op0
, op1
);
2814 case DEBUG_EXPR_DECL
:
2815 op0
= DECL_RTL_IF_SET (exp
);
2820 op0
= gen_rtx_DEBUG_EXPR (mode
);
2821 DEBUG_EXPR_TREE_DECL (op0
) = exp
;
2822 SET_DECL_RTL (exp
, op0
);
2832 op0
= DECL_RTL_IF_SET (exp
);
2834 /* This decl was probably optimized away. */
2837 if (TREE_CODE (exp
) != VAR_DECL
2838 || DECL_EXTERNAL (exp
)
2839 || !TREE_STATIC (exp
)
2841 || DECL_HARD_REGISTER (exp
)
2842 || DECL_IN_CONSTANT_POOL (exp
)
2843 || mode
== VOIDmode
)
2846 op0
= make_decl_rtl_for_debug (exp
);
2848 || GET_CODE (XEXP (op0
, 0)) != SYMBOL_REF
2849 || SYMBOL_REF_DECL (XEXP (op0
, 0)) != exp
)
2853 op0
= copy_rtx (op0
);
2855 if (GET_MODE (op0
) == BLKmode
2856 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2857 below would ICE. While it is likely a FE bug,
2858 try to be robust here. See PR43166. */
2860 || (mode
== VOIDmode
&& GET_MODE (op0
) != VOIDmode
))
2862 gcc_assert (MEM_P (op0
));
2863 op0
= adjust_address_nv (op0
, mode
, 0);
2874 inner_mode
= GET_MODE (op0
);
2876 if (mode
== inner_mode
)
2879 if (inner_mode
== VOIDmode
)
2881 if (TREE_CODE (exp
) == SSA_NAME
)
2882 inner_mode
= TYPE_MODE (TREE_TYPE (exp
));
2884 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
2885 if (mode
== inner_mode
)
2889 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
2891 if (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (inner_mode
))
2892 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
2893 else if (GET_MODE_BITSIZE (mode
) < GET_MODE_BITSIZE (inner_mode
))
2894 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
2896 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
2898 else if (FLOAT_MODE_P (mode
))
2900 gcc_assert (TREE_CODE (exp
) != SSA_NAME
);
2901 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
2902 op0
= simplify_gen_unary (UNSIGNED_FLOAT
, mode
, op0
, inner_mode
);
2904 op0
= simplify_gen_unary (FLOAT
, mode
, op0
, inner_mode
);
2906 else if (FLOAT_MODE_P (inner_mode
))
2909 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
2911 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
2913 else if (CONSTANT_P (op0
)
2914 || GET_MODE_PRECISION (mode
) <= GET_MODE_PRECISION (inner_mode
))
2915 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
2916 subreg_lowpart_offset (mode
,
2918 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == tcc_unary
2919 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)))
2921 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
2923 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
2929 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
2931 tree newexp
= fold_binary (MEM_REF
, TREE_TYPE (exp
),
2932 TREE_OPERAND (exp
, 0),
2933 TREE_OPERAND (exp
, 1));
2935 return expand_debug_expr (newexp
);
2939 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
2940 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
2944 if (TREE_CODE (exp
) == MEM_REF
)
2946 if (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
2947 || (GET_CODE (op0
) == PLUS
2948 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
))
2949 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2950 Instead just use get_inner_reference. */
2953 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
2954 if (!op1
|| !CONST_INT_P (op1
))
2957 op0
= plus_constant (inner_mode
, op0
, INTVAL (op1
));
2960 if (POINTER_TYPE_P (TREE_TYPE (exp
)))
2961 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
2963 as
= ADDR_SPACE_GENERIC
;
2965 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
2967 if (op0
== NULL_RTX
)
2970 op0
= gen_rtx_MEM (mode
, op0
);
2971 set_mem_attributes (op0
, exp
, 0);
2972 if (TREE_CODE (exp
) == MEM_REF
2973 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
2974 set_mem_expr (op0
, NULL_TREE
);
2975 set_mem_addr_space (op0
, as
);
2979 case TARGET_MEM_REF
:
2980 if (TREE_CODE (TMR_BASE (exp
)) == ADDR_EXPR
2981 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp
), 0)))
2984 op0
= expand_debug_expr
2985 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp
)), exp
));
2989 if (POINTER_TYPE_P (TREE_TYPE (exp
)))
2990 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
2992 as
= ADDR_SPACE_GENERIC
;
2994 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
2996 if (op0
== NULL_RTX
)
2999 op0
= gen_rtx_MEM (mode
, op0
);
3001 set_mem_attributes (op0
, exp
, 0);
3002 set_mem_addr_space (op0
, as
);
3008 case ARRAY_RANGE_REF
:
3013 case VIEW_CONVERT_EXPR
:
3015 enum machine_mode mode1
;
3016 HOST_WIDE_INT bitsize
, bitpos
;
3019 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
3020 &mode1
, &unsignedp
, &volatilep
, false);
3026 orig_op0
= op0
= expand_debug_expr (tem
);
3033 enum machine_mode addrmode
, offmode
;
3038 op0
= XEXP (op0
, 0);
3039 addrmode
= GET_MODE (op0
);
3040 if (addrmode
== VOIDmode
)
3043 op1
= expand_debug_expr (offset
);
3047 offmode
= GET_MODE (op1
);
3048 if (offmode
== VOIDmode
)
3049 offmode
= TYPE_MODE (TREE_TYPE (offset
));
3051 if (addrmode
!= offmode
)
3052 op1
= simplify_gen_subreg (addrmode
, op1
, offmode
,
3053 subreg_lowpart_offset (addrmode
,
3056 /* Don't use offset_address here, we don't need a
3057 recognizable address, and we don't want to generate
3059 op0
= gen_rtx_MEM (mode
, simplify_gen_binary (PLUS
, addrmode
,
3065 if (mode1
== VOIDmode
)
3067 mode1
= smallest_mode_for_size (bitsize
, MODE_INT
);
3068 if (bitpos
>= BITS_PER_UNIT
)
3070 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
3071 bitpos
%= BITS_PER_UNIT
;
3073 else if (bitpos
< 0)
3076 = (-bitpos
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
;
3077 op0
= adjust_address_nv (op0
, mode1
, units
);
3078 bitpos
+= units
* BITS_PER_UNIT
;
3080 else if (bitpos
== 0 && bitsize
== GET_MODE_BITSIZE (mode
))
3081 op0
= adjust_address_nv (op0
, mode
, 0);
3082 else if (GET_MODE (op0
) != mode1
)
3083 op0
= adjust_address_nv (op0
, mode1
, 0);
3085 op0
= copy_rtx (op0
);
3086 if (op0
== orig_op0
)
3087 op0
= shallow_copy_rtx (op0
);
3088 set_mem_attributes (op0
, exp
, 0);
3091 if (bitpos
== 0 && mode
== GET_MODE (op0
))
3097 if (GET_MODE (op0
) == BLKmode
)
3100 if ((bitpos
% BITS_PER_UNIT
) == 0
3101 && bitsize
== GET_MODE_BITSIZE (mode1
))
3103 enum machine_mode opmode
= GET_MODE (op0
);
3105 if (opmode
== VOIDmode
)
3106 opmode
= TYPE_MODE (TREE_TYPE (tem
));
3108 /* This condition may hold if we're expanding the address
3109 right past the end of an array that turned out not to
3110 be addressable (i.e., the address was only computed in
3111 debug stmts). The gen_subreg below would rightfully
3112 crash, and the address doesn't really exist, so just
3114 if (bitpos
>= GET_MODE_BITSIZE (opmode
))
3117 if ((bitpos
% GET_MODE_BITSIZE (mode
)) == 0)
3118 return simplify_gen_subreg (mode
, op0
, opmode
,
3119 bitpos
/ BITS_PER_UNIT
);
3122 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0
))
3123 && TYPE_UNSIGNED (TREE_TYPE (exp
))
3125 : ZERO_EXTRACT
, mode
,
3126 GET_MODE (op0
) != VOIDmode
3128 : TYPE_MODE (TREE_TYPE (tem
)),
3129 op0
, GEN_INT (bitsize
), GEN_INT (bitpos
));
3133 return simplify_gen_unary (ABS
, mode
, op0
, mode
);
3136 return simplify_gen_unary (NEG
, mode
, op0
, mode
);
3139 return simplify_gen_unary (NOT
, mode
, op0
, mode
);
3142 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
3144 ? UNSIGNED_FLOAT
: FLOAT
, mode
, op0
,
3147 case FIX_TRUNC_EXPR
:
3148 return simplify_gen_unary (unsignedp
? UNSIGNED_FIX
: FIX
, mode
, op0
,
3151 case POINTER_PLUS_EXPR
:
3152 /* For the rare target where pointers are not the same size as
3153 size_t, we need to check for mis-matched modes and correct
3156 && GET_MODE (op0
) != VOIDmode
&& GET_MODE (op1
) != VOIDmode
3157 && GET_MODE (op0
) != GET_MODE (op1
))
3159 if (GET_MODE_BITSIZE (GET_MODE (op0
)) < GET_MODE_BITSIZE (GET_MODE (op1
))
3160 /* If OP0 is a partial mode, then we must truncate, even if it has
3161 the same bitsize as OP1 as GCC's representation of partial modes
3163 || (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_PARTIAL_INT
3164 && GET_MODE_BITSIZE (GET_MODE (op0
)) == GET_MODE_BITSIZE (GET_MODE (op1
))))
3165 op1
= simplify_gen_unary (TRUNCATE
, GET_MODE (op0
), op1
,
3168 /* We always sign-extend, regardless of the signedness of
3169 the operand, because the operand is always unsigned
3170 here even if the original C expression is signed. */
3171 op1
= simplify_gen_unary (SIGN_EXTEND
, GET_MODE (op0
), op1
,
3176 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
3179 return simplify_gen_binary (MINUS
, mode
, op0
, op1
);
3182 return simplify_gen_binary (MULT
, mode
, op0
, op1
);
3185 case TRUNC_DIV_EXPR
:
3186 case EXACT_DIV_EXPR
:
3188 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
3190 return simplify_gen_binary (DIV
, mode
, op0
, op1
);
3192 case TRUNC_MOD_EXPR
:
3193 return simplify_gen_binary (unsignedp
? UMOD
: MOD
, mode
, op0
, op1
);
3195 case FLOOR_DIV_EXPR
:
3197 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
3200 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
3201 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3202 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
3203 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3206 case FLOOR_MOD_EXPR
:
3208 return simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3211 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3212 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
3213 adj
= simplify_gen_unary (NEG
, mode
,
3214 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3216 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3222 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
3223 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3224 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
3225 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3229 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
3230 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3231 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
3232 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3238 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3239 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
3240 adj
= simplify_gen_unary (NEG
, mode
,
3241 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3243 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3247 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3248 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
3249 adj
= simplify_gen_unary (NEG
, mode
,
3250 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3252 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3255 case ROUND_DIV_EXPR
:
3258 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
3259 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3260 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
3261 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3265 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
3266 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3267 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
3268 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3271 case ROUND_MOD_EXPR
:
3274 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3275 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
3276 adj
= simplify_gen_unary (NEG
, mode
,
3277 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3279 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3283 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3284 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
3285 adj
= simplify_gen_unary (NEG
, mode
,
3286 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3288 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3292 return simplify_gen_binary (ASHIFT
, mode
, op0
, op1
);
3296 return simplify_gen_binary (LSHIFTRT
, mode
, op0
, op1
);
3298 return simplify_gen_binary (ASHIFTRT
, mode
, op0
, op1
);
3301 return simplify_gen_binary (ROTATE
, mode
, op0
, op1
);
3304 return simplify_gen_binary (ROTATERT
, mode
, op0
, op1
);
3307 return simplify_gen_binary (unsignedp
? UMIN
: SMIN
, mode
, op0
, op1
);
3310 return simplify_gen_binary (unsignedp
? UMAX
: SMAX
, mode
, op0
, op1
);
3313 case TRUTH_AND_EXPR
:
3314 return simplify_gen_binary (AND
, mode
, op0
, op1
);
3318 return simplify_gen_binary (IOR
, mode
, op0
, op1
);
3321 case TRUTH_XOR_EXPR
:
3322 return simplify_gen_binary (XOR
, mode
, op0
, op1
);
3324 case TRUTH_ANDIF_EXPR
:
3325 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, const0_rtx
);
3327 case TRUTH_ORIF_EXPR
:
3328 return gen_rtx_IF_THEN_ELSE (mode
, op0
, const_true_rtx
, op1
);
3330 case TRUTH_NOT_EXPR
:
3331 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, const0_rtx
);
3334 return simplify_gen_relational (unsignedp
? LTU
: LT
, mode
, inner_mode
,
3338 return simplify_gen_relational (unsignedp
? LEU
: LE
, mode
, inner_mode
,
3342 return simplify_gen_relational (unsignedp
? GTU
: GT
, mode
, inner_mode
,
3346 return simplify_gen_relational (unsignedp
? GEU
: GE
, mode
, inner_mode
,
3350 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, op1
);
3353 return simplify_gen_relational (NE
, mode
, inner_mode
, op0
, op1
);
3355 case UNORDERED_EXPR
:
3356 return simplify_gen_relational (UNORDERED
, mode
, inner_mode
, op0
, op1
);
3359 return simplify_gen_relational (ORDERED
, mode
, inner_mode
, op0
, op1
);
3362 return simplify_gen_relational (UNLT
, mode
, inner_mode
, op0
, op1
);
3365 return simplify_gen_relational (UNLE
, mode
, inner_mode
, op0
, op1
);
3368 return simplify_gen_relational (UNGT
, mode
, inner_mode
, op0
, op1
);
3371 return simplify_gen_relational (UNGE
, mode
, inner_mode
, op0
, op1
);
3374 return simplify_gen_relational (UNEQ
, mode
, inner_mode
, op0
, op1
);
3377 return simplify_gen_relational (LTGT
, mode
, inner_mode
, op0
, op1
);
3380 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, op2
);
3383 gcc_assert (COMPLEX_MODE_P (mode
));
3384 if (GET_MODE (op0
) == VOIDmode
)
3385 op0
= gen_rtx_CONST (GET_MODE_INNER (mode
), op0
);
3386 if (GET_MODE (op1
) == VOIDmode
)
3387 op1
= gen_rtx_CONST (GET_MODE_INNER (mode
), op1
);
3388 return gen_rtx_CONCAT (mode
, op0
, op1
);
3391 if (GET_CODE (op0
) == CONCAT
)
3392 return gen_rtx_CONCAT (mode
, XEXP (op0
, 0),
3393 simplify_gen_unary (NEG
, GET_MODE_INNER (mode
),
3395 GET_MODE_INNER (mode
)));
3398 enum machine_mode imode
= GET_MODE_INNER (mode
);
3403 re
= adjust_address_nv (op0
, imode
, 0);
3404 im
= adjust_address_nv (op0
, imode
, GET_MODE_SIZE (imode
));
3408 enum machine_mode ifmode
= int_mode_for_mode (mode
);
3409 enum machine_mode ihmode
= int_mode_for_mode (imode
);
3411 if (ifmode
== BLKmode
|| ihmode
== BLKmode
)
3413 halfsize
= GEN_INT (GET_MODE_BITSIZE (ihmode
));
3416 re
= gen_rtx_SUBREG (ifmode
, re
, 0);
3417 re
= gen_rtx_ZERO_EXTRACT (ihmode
, re
, halfsize
, const0_rtx
);
3418 if (imode
!= ihmode
)
3419 re
= gen_rtx_SUBREG (imode
, re
, 0);
3420 im
= copy_rtx (op0
);
3422 im
= gen_rtx_SUBREG (ifmode
, im
, 0);
3423 im
= gen_rtx_ZERO_EXTRACT (ihmode
, im
, halfsize
, halfsize
);
3424 if (imode
!= ihmode
)
3425 im
= gen_rtx_SUBREG (imode
, im
, 0);
3427 im
= gen_rtx_NEG (imode
, im
);
3428 return gen_rtx_CONCAT (mode
, re
, im
);
3432 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
3433 if (!op0
|| !MEM_P (op0
))
3435 if ((TREE_CODE (TREE_OPERAND (exp
, 0)) == VAR_DECL
3436 || TREE_CODE (TREE_OPERAND (exp
, 0)) == PARM_DECL
3437 || TREE_CODE (TREE_OPERAND (exp
, 0)) == RESULT_DECL
)
3438 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp
, 0))
3439 || target_for_debug_bind (TREE_OPERAND (exp
, 0))))
3440 return gen_rtx_DEBUG_IMPLICIT_PTR (mode
, TREE_OPERAND (exp
, 0));
3442 if (handled_component_p (TREE_OPERAND (exp
, 0)))
3444 HOST_WIDE_INT bitoffset
, bitsize
, maxsize
;
3446 = get_ref_base_and_extent (TREE_OPERAND (exp
, 0),
3447 &bitoffset
, &bitsize
, &maxsize
);
3448 if ((TREE_CODE (decl
) == VAR_DECL
3449 || TREE_CODE (decl
) == PARM_DECL
3450 || TREE_CODE (decl
) == RESULT_DECL
)
3451 && (!TREE_ADDRESSABLE (decl
)
3452 || target_for_debug_bind (decl
))
3453 && (bitoffset
% BITS_PER_UNIT
) == 0
3455 && bitsize
== maxsize
)
3457 rtx base
= gen_rtx_DEBUG_IMPLICIT_PTR (mode
, decl
);
3458 return plus_constant (mode
, base
, bitoffset
/ BITS_PER_UNIT
);
3462 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == MEM_REF
3463 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
3466 op0
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
3469 && (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
3470 || (GET_CODE (op0
) == PLUS
3471 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
3472 && CONST_INT_P (XEXP (op0
, 1)))))
3474 op1
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
3476 if (!op1
|| !CONST_INT_P (op1
))
3479 return plus_constant (mode
, op0
, INTVAL (op1
));
3486 as
= TYPE_ADDR_SPACE (TREE_TYPE (exp
));
3487 op0
= convert_debug_memory_address (mode
, XEXP (op0
, 0), as
);
3495 op0
= gen_rtx_CONCATN
3496 (mode
, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
))));
3498 for (i
= 0; i
< VECTOR_CST_NELTS (exp
); ++i
)
3500 op1
= expand_debug_expr (VECTOR_CST_ELT (exp
, i
));
3503 XVECEXP (op0
, 0, i
) = op1
;
3510 if (TREE_CLOBBER_P (exp
))
3512 else if (TREE_CODE (TREE_TYPE (exp
)) == VECTOR_TYPE
)
3517 op0
= gen_rtx_CONCATN
3518 (mode
, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
))));
3520 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), i
, val
)
3522 op1
= expand_debug_expr (val
);
3525 XVECEXP (op0
, 0, i
) = op1
;
3528 if (i
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)))
3530 op1
= expand_debug_expr
3531 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp
))));
3536 for (; i
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)); i
++)
3537 XVECEXP (op0
, 0, i
) = op1
;
3543 goto flag_unsupported
;
3546 /* ??? Maybe handle some builtins? */
3551 gimple g
= get_gimple_for_ssa_name (exp
);
3554 op0
= expand_debug_expr (gimple_assign_rhs_to_tree (g
));
3560 int part
= var_to_partition (SA
.map
, exp
);
3562 if (part
== NO_PARTITION
)
3564 /* If this is a reference to an incoming value of parameter
3565 that is never used in the code or where the incoming
3566 value is never used in the code, use PARM_DECL's
3568 if (SSA_NAME_IS_DEFAULT_DEF (exp
)
3569 && TREE_CODE (SSA_NAME_VAR (exp
)) == PARM_DECL
)
3571 op0
= expand_debug_parm_decl (SSA_NAME_VAR (exp
));
3574 op0
= expand_debug_expr (SSA_NAME_VAR (exp
));
3581 gcc_assert (part
>= 0 && (unsigned)part
< SA
.map
->num_partitions
);
3583 op0
= copy_rtx (SA
.partition_to_pseudo
[part
]);
3591 /* Vector stuff. For most of the codes we don't have rtl codes. */
3592 case REALIGN_LOAD_EXPR
:
3593 case REDUC_MAX_EXPR
:
3594 case REDUC_MIN_EXPR
:
3595 case REDUC_PLUS_EXPR
:
3597 case VEC_LSHIFT_EXPR
:
3598 case VEC_PACK_FIX_TRUNC_EXPR
:
3599 case VEC_PACK_SAT_EXPR
:
3600 case VEC_PACK_TRUNC_EXPR
:
3601 case VEC_RSHIFT_EXPR
:
3602 case VEC_UNPACK_FLOAT_HI_EXPR
:
3603 case VEC_UNPACK_FLOAT_LO_EXPR
:
3604 case VEC_UNPACK_HI_EXPR
:
3605 case VEC_UNPACK_LO_EXPR
:
3606 case VEC_WIDEN_MULT_HI_EXPR
:
3607 case VEC_WIDEN_MULT_LO_EXPR
:
3608 case VEC_WIDEN_MULT_EVEN_EXPR
:
3609 case VEC_WIDEN_MULT_ODD_EXPR
:
3610 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3611 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3616 case ADDR_SPACE_CONVERT_EXPR
:
3617 case FIXED_CONVERT_EXPR
:
3619 case WITH_SIZE_EXPR
:
3623 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
3624 && SCALAR_INT_MODE_P (mode
))
3627 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
3629 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
3632 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
3634 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op1
,
3636 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
3637 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
3641 case WIDEN_MULT_EXPR
:
3642 case WIDEN_MULT_PLUS_EXPR
:
3643 case WIDEN_MULT_MINUS_EXPR
:
3644 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
3645 && SCALAR_INT_MODE_P (mode
))
3647 inner_mode
= GET_MODE (op0
);
3648 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
3649 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
3651 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
3652 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1))))
3653 op1
= simplify_gen_unary (ZERO_EXTEND
, mode
, op1
, inner_mode
);
3655 op1
= simplify_gen_unary (SIGN_EXTEND
, mode
, op1
, inner_mode
);
3656 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
3657 if (TREE_CODE (exp
) == WIDEN_MULT_EXPR
)
3659 else if (TREE_CODE (exp
) == WIDEN_MULT_PLUS_EXPR
)
3660 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
3662 return simplify_gen_binary (MINUS
, mode
, op2
, op0
);
3666 case MULT_HIGHPART_EXPR
:
3667 /* ??? Similar to the above. */
3670 case WIDEN_SUM_EXPR
:
3671 case WIDEN_LSHIFT_EXPR
:
3672 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
3673 && SCALAR_INT_MODE_P (mode
))
3676 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
3678 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
3680 return simplify_gen_binary (TREE_CODE (exp
) == WIDEN_LSHIFT_EXPR
3681 ? ASHIFT
: PLUS
, mode
, op0
, op1
);
3686 return simplify_gen_ternary (FMA
, mode
, inner_mode
, op0
, op1
, op2
);
3690 #ifdef ENABLE_CHECKING
3699 /* Return an RTX equivalent to the source bind value of the tree expression
3703 expand_debug_source_expr (tree exp
)
3706 enum machine_mode mode
= VOIDmode
, inner_mode
;
3708 switch (TREE_CODE (exp
))
3712 mode
= DECL_MODE (exp
);
3713 op0
= expand_debug_parm_decl (exp
);
3716 /* See if this isn't an argument that has been completely
3718 if (!DECL_RTL_SET_P (exp
)
3719 && !DECL_INCOMING_RTL (exp
)
3720 && DECL_ABSTRACT_ORIGIN (current_function_decl
))
3722 tree aexp
= DECL_ORIGIN (exp
);
3723 if (DECL_CONTEXT (aexp
)
3724 == DECL_ABSTRACT_ORIGIN (current_function_decl
))
3726 vec
<tree
, va_gc
> **debug_args
;
3729 debug_args
= decl_debug_args_lookup (current_function_decl
);
3730 if (debug_args
!= NULL
)
3732 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
);
3735 return gen_rtx_DEBUG_PARAMETER_REF (mode
, aexp
);
3745 if (op0
== NULL_RTX
)
3748 inner_mode
= GET_MODE (op0
);
3749 if (mode
== inner_mode
)
3752 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
3754 if (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (inner_mode
))
3755 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
3756 else if (GET_MODE_BITSIZE (mode
) < GET_MODE_BITSIZE (inner_mode
))
3757 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
3759 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
3761 else if (FLOAT_MODE_P (mode
))
3763 else if (FLOAT_MODE_P (inner_mode
))
3765 if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
3766 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
3768 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
3770 else if (CONSTANT_P (op0
)
3771 || GET_MODE_BITSIZE (mode
) <= GET_MODE_BITSIZE (inner_mode
))
3772 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
3773 subreg_lowpart_offset (mode
, inner_mode
));
3774 else if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
3775 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
3777 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
3782 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
3783 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
3784 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
3787 avoid_complex_debug_insns (rtx insn
, rtx
*exp_p
, int depth
)
3791 if (exp
== NULL_RTX
)
3794 if ((OBJECT_P (exp
) && !MEM_P (exp
)) || GET_CODE (exp
) == CLOBBER
)
3799 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
3800 rtx dval
= make_debug_expr_from_rtl (exp
);
3802 /* Emit a debug bind insn before INSN. */
3803 rtx bind
= gen_rtx_VAR_LOCATION (GET_MODE (exp
),
3804 DEBUG_EXPR_TREE_DECL (dval
), exp
,
3805 VAR_INIT_STATUS_INITIALIZED
);
3807 emit_debug_insn_before (bind
, insn
);
3812 const char *format_ptr
= GET_RTX_FORMAT (GET_CODE (exp
));
3814 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (exp
)); i
++)
3815 switch (*format_ptr
++)
3818 avoid_complex_debug_insns (insn
, &XEXP (exp
, i
), depth
+ 1);
3823 for (j
= 0; j
< XVECLEN (exp
, i
); j
++)
3824 avoid_complex_debug_insns (insn
, &XVECEXP (exp
, i
, j
), depth
+ 1);
3832 /* Expand the _LOCs in debug insns. We run this after expanding all
3833 regular insns, so that any variables referenced in the function
3834 will have their DECL_RTLs set. */
3837 expand_debug_locations (void)
3840 rtx last
= get_last_insn ();
3841 int save_strict_alias
= flag_strict_aliasing
;
3843 /* New alias sets while setting up memory attributes cause
3844 -fcompare-debug failures, even though it doesn't bring about any
3846 flag_strict_aliasing
= 0;
3848 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3849 if (DEBUG_INSN_P (insn
))
3851 tree value
= (tree
)INSN_VAR_LOCATION_LOC (insn
);
3852 rtx val
, prev_insn
, insn2
;
3853 enum machine_mode mode
;
3855 if (value
== NULL_TREE
)
3859 if (INSN_VAR_LOCATION_STATUS (insn
)
3860 == VAR_INIT_STATUS_UNINITIALIZED
)
3861 val
= expand_debug_source_expr (value
);
3863 val
= expand_debug_expr (value
);
3864 gcc_assert (last
== get_last_insn ());
3868 val
= gen_rtx_UNKNOWN_VAR_LOC ();
3871 mode
= GET_MODE (INSN_VAR_LOCATION (insn
));
3873 gcc_assert (mode
== GET_MODE (val
)
3874 || (GET_MODE (val
) == VOIDmode
3875 && (CONST_SCALAR_INT_P (val
)
3876 || GET_CODE (val
) == CONST_FIXED
3877 || GET_CODE (val
) == LABEL_REF
)));
3880 INSN_VAR_LOCATION_LOC (insn
) = val
;
3881 prev_insn
= PREV_INSN (insn
);
3882 for (insn2
= insn
; insn2
!= prev_insn
; insn2
= PREV_INSN (insn2
))
3883 avoid_complex_debug_insns (insn2
, &INSN_VAR_LOCATION_LOC (insn2
), 0);
3886 flag_strict_aliasing
= save_strict_alias
;
3889 /* Expand basic block BB from GIMPLE trees to RTL. */
3892 expand_gimple_basic_block (basic_block bb
, bool disable_tail_calls
)
3894 gimple_stmt_iterator gsi
;
3903 fprintf (dump_file
, "\n;; Generating RTL for gimple basic block %d\n",
3906 /* Note that since we are now transitioning from GIMPLE to RTL, we
3907 cannot use the gsi_*_bb() routines because they expect the basic
3908 block to be in GIMPLE, instead of RTL. Therefore, we need to
3909 access the BB sequence directly. */
3910 stmts
= bb_seq (bb
);
3911 bb
->il
.gimple
.seq
= NULL
;
3912 bb
->il
.gimple
.phi_nodes
= NULL
;
3913 rtl_profile_for_bb (bb
);
3914 init_rtl_bb_info (bb
);
3915 bb
->flags
|= BB_RTL
;
3917 /* Remove the RETURN_EXPR if we may fall though to the exit
3919 gsi
= gsi_last (stmts
);
3920 if (!gsi_end_p (gsi
)
3921 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
3923 gimple ret_stmt
= gsi_stmt (gsi
);
3925 gcc_assert (single_succ_p (bb
));
3926 gcc_assert (single_succ (bb
) == EXIT_BLOCK_PTR
);
3928 if (bb
->next_bb
== EXIT_BLOCK_PTR
3929 && !gimple_return_retval (ret_stmt
))
3931 gsi_remove (&gsi
, false);
3932 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
3936 gsi
= gsi_start (stmts
);
3937 if (!gsi_end_p (gsi
))
3939 stmt
= gsi_stmt (gsi
);
3940 if (gimple_code (stmt
) != GIMPLE_LABEL
)
3944 elt
= pointer_map_contains (lab_rtx_for_bb
, bb
);
3948 last
= get_last_insn ();
3952 expand_gimple_stmt (stmt
);
3957 emit_label ((rtx
) *elt
);
3959 /* Java emits line number notes in the top of labels.
3960 ??? Make this go away once line number notes are obsoleted. */
3961 BB_HEAD (bb
) = NEXT_INSN (last
);
3962 if (NOTE_P (BB_HEAD (bb
)))
3963 BB_HEAD (bb
) = NEXT_INSN (BB_HEAD (bb
));
3964 note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, BB_HEAD (bb
));
3966 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
3969 note
= BB_HEAD (bb
) = emit_note (NOTE_INSN_BASIC_BLOCK
);
3971 NOTE_BASIC_BLOCK (note
) = bb
;
3973 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3977 stmt
= gsi_stmt (gsi
);
3979 /* If this statement is a non-debug one, and we generate debug
3980 insns, then this one might be the last real use of a TERed
3981 SSA_NAME, but where there are still some debug uses further
3982 down. Expanding the current SSA name in such further debug
3983 uses by their RHS might lead to wrong debug info, as coalescing
3984 might make the operands of such RHS be placed into the same
3985 pseudo as something else. Like so:
3986 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3990 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3991 If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
3992 the write to a_2 would actually have clobbered the place which
3995 So, instead of that, we recognize the situation, and generate
3996 debug temporaries at the last real use of TERed SSA names:
4003 if (MAY_HAVE_DEBUG_INSNS
4005 && !is_gimple_debug (stmt
))
4011 location_t sloc
= curr_insn_location ();
4013 /* Look for SSA names that have their last use here (TERed
4014 names always have only one real use). */
4015 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
4016 if ((def
= get_gimple_for_ssa_name (op
)))
4018 imm_use_iterator imm_iter
;
4019 use_operand_p use_p
;
4020 bool have_debug_uses
= false;
4022 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, op
)
4024 if (gimple_debug_bind_p (USE_STMT (use_p
)))
4026 have_debug_uses
= true;
4031 if (have_debug_uses
)
4033 /* OP is a TERed SSA name, with DEF it's defining
4034 statement, and where OP is used in further debug
4035 instructions. Generate a debug temporary, and
4036 replace all uses of OP in debug insns with that
4039 tree value
= gimple_assign_rhs_to_tree (def
);
4040 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
4042 enum machine_mode mode
;
4044 set_curr_insn_location (gimple_location (def
));
4046 DECL_ARTIFICIAL (vexpr
) = 1;
4047 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
4049 mode
= DECL_MODE (value
);
4051 mode
= TYPE_MODE (TREE_TYPE (value
));
4052 DECL_MODE (vexpr
) = mode
;
4054 val
= gen_rtx_VAR_LOCATION
4055 (mode
, vexpr
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
4057 emit_debug_insn (val
);
4059 FOR_EACH_IMM_USE_STMT (debugstmt
, imm_iter
, op
)
4061 if (!gimple_debug_bind_p (debugstmt
))
4064 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
4065 SET_USE (use_p
, vexpr
);
4067 update_stmt (debugstmt
);
4071 set_curr_insn_location (sloc
);
4074 currently_expanding_gimple_stmt
= stmt
;
4076 /* Expand this statement, then evaluate the resulting RTL and
4077 fixup the CFG accordingly. */
4078 if (gimple_code (stmt
) == GIMPLE_COND
)
4080 new_bb
= expand_gimple_cond (bb
, stmt
);
4084 else if (gimple_debug_bind_p (stmt
))
4086 location_t sloc
= curr_insn_location ();
4087 gimple_stmt_iterator nsi
= gsi
;
4091 tree var
= gimple_debug_bind_get_var (stmt
);
4094 enum machine_mode mode
;
4096 if (TREE_CODE (var
) != DEBUG_EXPR_DECL
4097 && TREE_CODE (var
) != LABEL_DECL
4098 && !target_for_debug_bind (var
))
4099 goto delink_debug_stmt
;
4101 if (gimple_debug_bind_has_value_p (stmt
))
4102 value
= gimple_debug_bind_get_value (stmt
);
4106 last
= get_last_insn ();
4108 set_curr_insn_location (gimple_location (stmt
));
4111 mode
= DECL_MODE (var
);
4113 mode
= TYPE_MODE (TREE_TYPE (var
));
4115 val
= gen_rtx_VAR_LOCATION
4116 (mode
, var
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
4118 emit_debug_insn (val
);
4120 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4122 /* We can't dump the insn with a TREE where an RTX
4124 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
4125 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
4126 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
4130 /* In order not to generate too many debug temporaries,
4131 we delink all uses of debug statements we already expanded.
4132 Therefore debug statements between definition and real
4133 use of TERed SSA names will continue to use the SSA name,
4134 and not be replaced with debug temps. */
4135 delink_stmt_imm_use (stmt
);
4139 if (gsi_end_p (nsi
))
4141 stmt
= gsi_stmt (nsi
);
4142 if (!gimple_debug_bind_p (stmt
))
4146 set_curr_insn_location (sloc
);
4148 else if (gimple_debug_source_bind_p (stmt
))
4150 location_t sloc
= curr_insn_location ();
4151 tree var
= gimple_debug_source_bind_get_var (stmt
);
4152 tree value
= gimple_debug_source_bind_get_value (stmt
);
4154 enum machine_mode mode
;
4156 last
= get_last_insn ();
4158 set_curr_insn_location (gimple_location (stmt
));
4160 mode
= DECL_MODE (var
);
4162 val
= gen_rtx_VAR_LOCATION (mode
, var
, (rtx
)value
,
4163 VAR_INIT_STATUS_UNINITIALIZED
);
4165 emit_debug_insn (val
);
4167 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4169 /* We can't dump the insn with a TREE where an RTX
4171 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
4172 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
4173 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
4176 set_curr_insn_location (sloc
);
4180 if (is_gimple_call (stmt
)
4181 && gimple_call_tail_p (stmt
)
4182 && disable_tail_calls
)
4183 gimple_call_set_tail (stmt
, false);
4185 if (is_gimple_call (stmt
) && gimple_call_tail_p (stmt
))
4188 new_bb
= expand_gimple_tailcall (bb
, stmt
, &can_fallthru
);
4199 def_operand_p def_p
;
4200 def_p
= SINGLE_SSA_DEF_OPERAND (stmt
, SSA_OP_DEF
);
4204 /* Ignore this stmt if it is in the list of
4205 replaceable expressions. */
4207 && bitmap_bit_p (SA
.values
,
4208 SSA_NAME_VERSION (DEF_FROM_PTR (def_p
))))
4211 last
= expand_gimple_stmt (stmt
);
4212 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
4217 currently_expanding_gimple_stmt
= NULL
;
4219 /* Expand implicit goto and convert goto_locus. */
4220 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4222 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
4223 set_curr_insn_location (e
->goto_locus
);
4224 if ((e
->flags
& EDGE_FALLTHRU
) && e
->dest
!= bb
->next_bb
)
4226 emit_jump (label_rtx_for_bb (e
->dest
));
4227 e
->flags
&= ~EDGE_FALLTHRU
;
4231 /* Expanded RTL can create a jump in the last instruction of block.
4232 This later might be assumed to be a jump to successor and break edge insertion.
4233 We need to insert dummy move to prevent this. PR41440. */
4234 if (single_succ_p (bb
)
4235 && (single_succ_edge (bb
)->flags
& EDGE_FALLTHRU
)
4236 && (last
= get_last_insn ())
4239 rtx dummy
= gen_reg_rtx (SImode
);
4240 emit_insn_after_noloc (gen_move_insn (dummy
, dummy
), last
, NULL
);
4243 do_pending_stack_adjust ();
4245 /* Find the block tail. The last insn in the block is the insn
4246 before a barrier and/or table jump insn. */
4247 last
= get_last_insn ();
4248 if (BARRIER_P (last
))
4249 last
= PREV_INSN (last
);
4250 if (JUMP_TABLE_DATA_P (last
))
4251 last
= PREV_INSN (PREV_INSN (last
));
4254 update_bb_for_insn (bb
);
4260 /* Create a basic block for initialization code. */
4263 construct_init_block (void)
4265 basic_block init_block
, first_block
;
4269 /* Multiple entry points not supported yet. */
4270 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR
->succs
) == 1);
4271 init_rtl_bb_info (ENTRY_BLOCK_PTR
);
4272 init_rtl_bb_info (EXIT_BLOCK_PTR
);
4273 ENTRY_BLOCK_PTR
->flags
|= BB_RTL
;
4274 EXIT_BLOCK_PTR
->flags
|= BB_RTL
;
4276 e
= EDGE_SUCC (ENTRY_BLOCK_PTR
, 0);
4278 /* When entry edge points to first basic block, we don't need jump,
4279 otherwise we have to jump into proper target. */
4280 if (e
&& e
->dest
!= ENTRY_BLOCK_PTR
->next_bb
)
4282 tree label
= gimple_block_label (e
->dest
);
4284 emit_jump (label_rtx (label
));
4288 flags
= EDGE_FALLTHRU
;
4290 init_block
= create_basic_block (NEXT_INSN (get_insns ()),
4293 init_block
->frequency
= ENTRY_BLOCK_PTR
->frequency
;
4294 init_block
->count
= ENTRY_BLOCK_PTR
->count
;
4295 if (current_loops
&& ENTRY_BLOCK_PTR
->loop_father
)
4296 add_bb_to_loop (init_block
, ENTRY_BLOCK_PTR
->loop_father
);
4299 first_block
= e
->dest
;
4300 redirect_edge_succ (e
, init_block
);
4301 e
= make_edge (init_block
, first_block
, flags
);
4304 e
= make_edge (init_block
, EXIT_BLOCK_PTR
, EDGE_FALLTHRU
);
4305 e
->probability
= REG_BR_PROB_BASE
;
4306 e
->count
= ENTRY_BLOCK_PTR
->count
;
4308 update_bb_for_insn (init_block
);
4312 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4313 found in the block tree. */
4316 set_block_levels (tree block
, int level
)
4320 BLOCK_NUMBER (block
) = level
;
4321 set_block_levels (BLOCK_SUBBLOCKS (block
), level
+ 1);
4322 block
= BLOCK_CHAIN (block
);
4326 /* Create a block containing landing pads and similar stuff. */
4329 construct_exit_block (void)
4331 rtx head
= get_last_insn ();
4333 basic_block exit_block
;
4337 rtx orig_end
= BB_END (EXIT_BLOCK_PTR
->prev_bb
);
4339 rtl_profile_for_bb (EXIT_BLOCK_PTR
);
4341 /* Make sure the locus is set to the end of the function, so that
4342 epilogue line numbers and warnings are set properly. */
4343 if (LOCATION_LOCUS (cfun
->function_end_locus
) != UNKNOWN_LOCATION
)
4344 input_location
= cfun
->function_end_locus
;
4346 /* Generate rtl for function exit. */
4347 expand_function_end ();
4349 end
= get_last_insn ();
4352 /* While emitting the function end we could move end of the last basic block.
4354 BB_END (EXIT_BLOCK_PTR
->prev_bb
) = orig_end
;
4355 while (NEXT_INSN (head
) && NOTE_P (NEXT_INSN (head
)))
4356 head
= NEXT_INSN (head
);
4357 exit_block
= create_basic_block (NEXT_INSN (head
), end
,
4358 EXIT_BLOCK_PTR
->prev_bb
);
4359 exit_block
->frequency
= EXIT_BLOCK_PTR
->frequency
;
4360 exit_block
->count
= EXIT_BLOCK_PTR
->count
;
4361 if (current_loops
&& EXIT_BLOCK_PTR
->loop_father
)
4362 add_bb_to_loop (exit_block
, EXIT_BLOCK_PTR
->loop_father
);
4365 while (ix
< EDGE_COUNT (EXIT_BLOCK_PTR
->preds
))
4367 e
= EDGE_PRED (EXIT_BLOCK_PTR
, ix
);
4368 if (!(e
->flags
& EDGE_ABNORMAL
))
4369 redirect_edge_succ (e
, exit_block
);
4374 e
= make_edge (exit_block
, EXIT_BLOCK_PTR
, EDGE_FALLTHRU
);
4375 e
->probability
= REG_BR_PROB_BASE
;
4376 e
->count
= EXIT_BLOCK_PTR
->count
;
4377 FOR_EACH_EDGE (e2
, ei
, EXIT_BLOCK_PTR
->preds
)
4380 e
->count
-= e2
->count
;
4381 exit_block
->count
-= e2
->count
;
4382 exit_block
->frequency
-= EDGE_FREQUENCY (e2
);
4386 if (exit_block
->count
< 0)
4387 exit_block
->count
= 0;
4388 if (exit_block
->frequency
< 0)
4389 exit_block
->frequency
= 0;
4390 update_bb_for_insn (exit_block
);
4393 /* Helper function for discover_nonconstant_array_refs.
4394 Look for ARRAY_REF nodes with non-constant indexes and mark them
4398 discover_nonconstant_array_refs_r (tree
* tp
, int *walk_subtrees
,
4399 void *data ATTRIBUTE_UNUSED
)
4403 if (IS_TYPE_OR_DECL_P (t
))
4405 else if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
4407 while (((TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
4408 && is_gimple_min_invariant (TREE_OPERAND (t
, 1))
4409 && (!TREE_OPERAND (t
, 2)
4410 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
4411 || (TREE_CODE (t
) == COMPONENT_REF
4412 && (!TREE_OPERAND (t
,2)
4413 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
4414 || TREE_CODE (t
) == BIT_FIELD_REF
4415 || TREE_CODE (t
) == REALPART_EXPR
4416 || TREE_CODE (t
) == IMAGPART_EXPR
4417 || TREE_CODE (t
) == VIEW_CONVERT_EXPR
4418 || CONVERT_EXPR_P (t
))
4419 t
= TREE_OPERAND (t
, 0);
4421 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
4423 t
= get_base_address (t
);
4425 && DECL_MODE (t
) != BLKmode
)
4426 TREE_ADDRESSABLE (t
) = 1;
4435 /* RTL expansion is not able to compile array references with variable
4436 offsets for arrays stored in single register. Discover such
4437 expressions and mark variables as addressable to avoid this
4441 discover_nonconstant_array_refs (void)
4444 gimple_stmt_iterator gsi
;
4447 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
4449 gimple stmt
= gsi_stmt (gsi
);
4450 if (!is_gimple_debug (stmt
))
4451 walk_gimple_op (stmt
, discover_nonconstant_array_refs_r
, NULL
);
4455 /* This function sets crtl->args.internal_arg_pointer to a virtual
4456 register if DRAP is needed. Local register allocator will replace
4457 virtual_incoming_args_rtx with the virtual register. */
4460 expand_stack_alignment (void)
4463 unsigned int preferred_stack_boundary
;
4465 if (! SUPPORTS_STACK_ALIGNMENT
)
4468 if (cfun
->calls_alloca
4469 || cfun
->has_nonlocal_label
4470 || crtl
->has_nonlocal_goto
)
4471 crtl
->need_drap
= true;
4473 /* Call update_stack_boundary here again to update incoming stack
4474 boundary. It may set incoming stack alignment to a different
4475 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4476 use the minimum incoming stack alignment to check if it is OK
4477 to perform sibcall optimization since sibcall optimization will
4478 only align the outgoing stack to incoming stack boundary. */
4479 if (targetm
.calls
.update_stack_boundary
)
4480 targetm
.calls
.update_stack_boundary ();
4482 /* The incoming stack frame has to be aligned at least at
4483 parm_stack_boundary. */
4484 gcc_assert (crtl
->parm_stack_boundary
<= INCOMING_STACK_BOUNDARY
);
4486 /* Update crtl->stack_alignment_estimated and use it later to align
4487 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4488 exceptions since callgraph doesn't collect incoming stack alignment
4490 if (cfun
->can_throw_non_call_exceptions
4491 && PREFERRED_STACK_BOUNDARY
> crtl
->preferred_stack_boundary
)
4492 preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
4494 preferred_stack_boundary
= crtl
->preferred_stack_boundary
;
4495 if (preferred_stack_boundary
> crtl
->stack_alignment_estimated
)
4496 crtl
->stack_alignment_estimated
= preferred_stack_boundary
;
4497 if (preferred_stack_boundary
> crtl
->stack_alignment_needed
)
4498 crtl
->stack_alignment_needed
= preferred_stack_boundary
;
4500 gcc_assert (crtl
->stack_alignment_needed
4501 <= crtl
->stack_alignment_estimated
);
4503 crtl
->stack_realign_needed
4504 = INCOMING_STACK_BOUNDARY
< crtl
->stack_alignment_estimated
;
4505 crtl
->stack_realign_tried
= crtl
->stack_realign_needed
;
4507 crtl
->stack_realign_processed
= true;
4509 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4511 gcc_assert (targetm
.calls
.get_drap_rtx
!= NULL
);
4512 drap_rtx
= targetm
.calls
.get_drap_rtx ();
4514 /* stack_realign_drap and drap_rtx must match. */
4515 gcc_assert ((stack_realign_drap
!= 0) == (drap_rtx
!= NULL
));
4517 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4518 if (NULL
!= drap_rtx
)
4520 crtl
->args
.internal_arg_pointer
= drap_rtx
;
4522 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4524 fixup_tail_calls ();
4528 /* Translate the intermediate representation contained in the CFG
4529 from GIMPLE trees to RTL.
4531 We do conversion per basic block and preserve/update the tree CFG.
4532 This implies we have to do some magic as the CFG can simultaneously
4533 consist of basic blocks containing RTL and GIMPLE trees. This can
4534 confuse the CFG hooks, so be careful to not manipulate CFG during
4538 gimple_expand_cfg (void)
4540 basic_block bb
, init_block
;
4544 rtx var_seq
, var_ret_seq
;
4547 timevar_push (TV_OUT_OF_SSA
);
4548 rewrite_out_of_ssa (&SA
);
4549 timevar_pop (TV_OUT_OF_SSA
);
4550 SA
.partition_to_pseudo
= XCNEWVEC (rtx
, SA
.map
->num_partitions
);
4552 /* Make sure all values used by the optimization passes have sane
4556 /* Some backends want to know that we are expanding to RTL. */
4557 currently_expanding_to_rtl
= 1;
4558 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4559 free_dominance_info (CDI_DOMINATORS
);
4561 rtl_profile_for_bb (ENTRY_BLOCK_PTR
);
4563 insn_locations_init ();
4564 if (!DECL_IS_BUILTIN (current_function_decl
))
4566 /* Eventually, all FEs should explicitly set function_start_locus. */
4567 if (LOCATION_LOCUS (cfun
->function_start_locus
) == UNKNOWN_LOCATION
)
4568 set_curr_insn_location
4569 (DECL_SOURCE_LOCATION (current_function_decl
));
4571 set_curr_insn_location (cfun
->function_start_locus
);
4574 set_curr_insn_location (UNKNOWN_LOCATION
);
4575 prologue_location
= curr_insn_location ();
4577 #ifdef INSN_SCHEDULING
4578 init_sched_attrs ();
4581 /* Make sure first insn is a note even if we don't want linenums.
4582 This makes sure the first insn will never be deleted.
4583 Also, final expects a note to appear there. */
4584 emit_note (NOTE_INSN_DELETED
);
4586 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4587 discover_nonconstant_array_refs ();
4589 targetm
.expand_to_rtl_hook ();
4590 crtl
->stack_alignment_needed
= STACK_BOUNDARY
;
4591 crtl
->max_used_stack_slot_alignment
= STACK_BOUNDARY
;
4592 crtl
->stack_alignment_estimated
= 0;
4593 crtl
->preferred_stack_boundary
= STACK_BOUNDARY
;
4594 cfun
->cfg
->max_jumptable_ents
= 0;
4596 /* Resovle the function section. Some targets, like ARM EABI rely on knowledge
4597 of the function section at exapnsion time to predict distance of calls. */
4598 resolve_unique_section (current_function_decl
, 0, flag_function_sections
);
4600 /* Expand the variables recorded during gimple lowering. */
4601 timevar_push (TV_VAR_EXPAND
);
4604 var_ret_seq
= expand_used_vars ();
4606 var_seq
= get_insns ();
4608 timevar_pop (TV_VAR_EXPAND
);
4610 /* Honor stack protection warnings. */
4611 if (warn_stack_protect
)
4613 if (cfun
->calls_alloca
)
4614 warning (OPT_Wstack_protector
,
4615 "stack protector not protecting local variables: "
4616 "variable length buffer");
4617 if (has_short_buffer
&& !crtl
->stack_protect_guard
)
4618 warning (OPT_Wstack_protector
,
4619 "stack protector not protecting function: "
4620 "all local arrays are less than %d bytes long",
4621 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE
));
4624 /* Set up parameters and prepare for return, for the function. */
4625 expand_function_start (current_function_decl
);
4627 /* If we emitted any instructions for setting up the variables,
4628 emit them before the FUNCTION_START note. */
4631 emit_insn_before (var_seq
, parm_birth_insn
);
4633 /* In expand_function_end we'll insert the alloca save/restore
4634 before parm_birth_insn. We've just insertted an alloca call.
4635 Adjust the pointer to match. */
4636 parm_birth_insn
= var_seq
;
4639 /* Now that we also have the parameter RTXs, copy them over to our
4641 for (i
= 0; i
< SA
.map
->num_partitions
; i
++)
4643 tree var
= SSA_NAME_VAR (partition_to_var (SA
.map
, i
));
4645 if (TREE_CODE (var
) != VAR_DECL
4646 && !SA
.partition_to_pseudo
[i
])
4647 SA
.partition_to_pseudo
[i
] = DECL_RTL_IF_SET (var
);
4648 gcc_assert (SA
.partition_to_pseudo
[i
]);
4650 /* If this decl was marked as living in multiple places, reset
4651 this now to NULL. */
4652 if (DECL_RTL_IF_SET (var
) == pc_rtx
)
4653 SET_DECL_RTL (var
, NULL
);
4655 /* Some RTL parts really want to look at DECL_RTL(x) when x
4656 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4657 SET_DECL_RTL here making this available, but that would mean
4658 to select one of the potentially many RTLs for one DECL. Instead
4659 of doing that we simply reset the MEM_EXPR of the RTL in question,
4660 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4661 if (!DECL_RTL_SET_P (var
))
4663 if (MEM_P (SA
.partition_to_pseudo
[i
]))
4664 set_mem_expr (SA
.partition_to_pseudo
[i
], NULL
);
4668 /* If we have a class containing differently aligned pointers
4669 we need to merge those into the corresponding RTL pointer
4671 for (i
= 1; i
< num_ssa_names
; i
++)
4673 tree name
= ssa_name (i
);
4678 /* We might have generated new SSA names in
4679 update_alias_info_with_stack_vars. They will have a NULL
4680 defining statements, and won't be part of the partitioning,
4682 || !SSA_NAME_DEF_STMT (name
))
4684 part
= var_to_partition (SA
.map
, name
);
4685 if (part
== NO_PARTITION
)
4688 /* Adjust all partition members to get the underlying decl of
4689 the representative which we might have created in expand_one_var. */
4690 if (SSA_NAME_VAR (name
) == NULL_TREE
)
4692 tree leader
= partition_to_var (SA
.map
, part
);
4693 gcc_assert (SSA_NAME_VAR (leader
) != NULL_TREE
);
4694 replace_ssa_name_symbol (name
, SSA_NAME_VAR (leader
));
4696 if (!POINTER_TYPE_P (TREE_TYPE (name
)))
4699 r
= SA
.partition_to_pseudo
[part
];
4701 mark_reg_pointer (r
, get_pointer_alignment (name
));
4704 /* If this function is `main', emit a call to `__main'
4705 to run global initializers, etc. */
4706 if (DECL_NAME (current_function_decl
)
4707 && MAIN_NAME_P (DECL_NAME (current_function_decl
))
4708 && DECL_FILE_SCOPE_P (current_function_decl
))
4709 expand_main_function ();
4711 /* Initialize the stack_protect_guard field. This must happen after the
4712 call to __main (if any) so that the external decl is initialized. */
4713 if (crtl
->stack_protect_guard
)
4714 stack_protect_prologue ();
4716 expand_phi_nodes (&SA
);
4718 /* Register rtl specific functions for cfg. */
4719 rtl_register_cfg_hooks ();
4721 init_block
= construct_init_block ();
4723 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4724 remaining edges later. */
4725 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR
->succs
)
4726 e
->flags
&= ~EDGE_EXECUTABLE
;
4728 lab_rtx_for_bb
= pointer_map_create ();
4729 FOR_BB_BETWEEN (bb
, init_block
->next_bb
, EXIT_BLOCK_PTR
, next_bb
)
4730 bb
= expand_gimple_basic_block (bb
, var_ret_seq
!= NULL_RTX
);
4732 if (MAY_HAVE_DEBUG_INSNS
)
4733 expand_debug_locations ();
4735 /* Free stuff we no longer need after GIMPLE optimizations. */
4736 free_dominance_info (CDI_DOMINATORS
);
4737 free_dominance_info (CDI_POST_DOMINATORS
);
4738 delete_tree_cfg_annotations ();
4740 timevar_push (TV_OUT_OF_SSA
);
4741 finish_out_of_ssa (&SA
);
4742 timevar_pop (TV_OUT_OF_SSA
);
4744 timevar_push (TV_POST_EXPAND
);
4745 /* We are no longer in SSA form. */
4746 cfun
->gimple_df
->in_ssa_p
= false;
4748 loops_state_clear (LOOP_CLOSED_SSA
);
4750 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
4751 conservatively to true until they are all profile aware. */
4752 pointer_map_destroy (lab_rtx_for_bb
);
4755 construct_exit_block ();
4756 insn_locations_finalize ();
4760 rtx after
= return_label
;
4761 rtx next
= NEXT_INSN (after
);
4762 if (next
&& NOTE_INSN_BASIC_BLOCK_P (next
))
4764 emit_insn_after (var_ret_seq
, after
);
4767 /* Zap the tree EH table. */
4768 set_eh_throw_stmt_table (cfun
, NULL
);
4770 /* We need JUMP_LABEL be set in order to redirect jumps, and hence
4771 split edges which edge insertions might do. */
4772 rebuild_jump_labels (get_insns ());
4774 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
, EXIT_BLOCK_PTR
, next_bb
)
4778 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
4782 rebuild_jump_labels_chain (e
->insns
.r
);
4783 /* Avoid putting insns before parm_birth_insn. */
4784 if (e
->src
== ENTRY_BLOCK_PTR
4785 && single_succ_p (ENTRY_BLOCK_PTR
)
4788 rtx insns
= e
->insns
.r
;
4789 e
->insns
.r
= NULL_RTX
;
4790 emit_insn_after_noloc (insns
, parm_birth_insn
, e
->dest
);
4793 commit_one_edge_insertion (e
);
4800 /* We're done expanding trees to RTL. */
4801 currently_expanding_to_rtl
= 0;
4803 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
->next_bb
, EXIT_BLOCK_PTR
, next_bb
)
4807 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
4809 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4810 e
->flags
&= ~EDGE_EXECUTABLE
;
4812 /* At the moment not all abnormal edges match the RTL
4813 representation. It is safe to remove them here as
4814 find_many_sub_basic_blocks will rediscover them.
4815 In the future we should get this fixed properly. */
4816 if ((e
->flags
& EDGE_ABNORMAL
)
4817 && !(e
->flags
& EDGE_SIBCALL
))
4824 blocks
= sbitmap_alloc (last_basic_block
);
4825 bitmap_ones (blocks
);
4826 find_many_sub_basic_blocks (blocks
);
4827 sbitmap_free (blocks
);
4828 purge_all_dead_edges ();
4830 expand_stack_alignment ();
4832 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4834 if (crtl
->tail_call_emit
)
4835 fixup_tail_calls ();
4837 /* After initial rtl generation, call back to finish generating
4838 exception support code. We need to do this before cleaning up
4839 the CFG as the code does not expect dead landing pads. */
4840 if (cfun
->eh
->region_tree
!= NULL
)
4841 finish_eh_generation ();
4843 /* Remove unreachable blocks, otherwise we cannot compute dominators
4844 which are needed for loop state verification. As a side-effect
4845 this also compacts blocks.
4846 ??? We cannot remove trivially dead insns here as for example
4847 the DRAP reg on i?86 is not magically live at this point.
4848 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4849 cleanup_cfg (CLEANUP_NO_INSN_DEL
);
4851 #ifdef ENABLE_CHECKING
4852 verify_flow_info ();
4855 /* Initialize pseudos allocated for hard registers. */
4856 emit_initial_value_sets ();
4858 /* And finally unshare all RTL. */
4861 /* There's no need to defer outputting this function any more; we
4862 know we want to output it. */
4863 DECL_DEFER_OUTPUT (current_function_decl
) = 0;
4865 /* Now that we're done expanding trees to RTL, we shouldn't have any
4866 more CONCATs anywhere. */
4867 generating_concat_p
= 0;
4872 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4873 /* And the pass manager will dump RTL for us. */
4876 /* If we're emitting a nested function, make sure its parent gets
4877 emitted as well. Doing otherwise confuses debug info. */
4880 for (parent
= DECL_CONTEXT (current_function_decl
);
4881 parent
!= NULL_TREE
;
4882 parent
= get_containing_scope (parent
))
4883 if (TREE_CODE (parent
) == FUNCTION_DECL
)
4884 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent
)) = 1;
4887 /* We are now committed to emitting code for this function. Do any
4888 preparation, such as emitting abstract debug info for the inline
4889 before it gets mangled by optimization. */
4890 if (cgraph_function_possibly_inlined_p (current_function_decl
))
4891 (*debug_hooks
->outlining_inline_function
) (current_function_decl
);
4893 TREE_ASM_WRITTEN (current_function_decl
) = 1;
4895 /* After expanding, the return labels are no longer needed. */
4896 return_label
= NULL
;
4897 naked_return_label
= NULL
;
4899 /* After expanding, the tm_restart map is no longer needed. */
4900 if (cfun
->gimple_df
->tm_restart
)
4902 htab_delete (cfun
->gimple_df
->tm_restart
);
4903 cfun
->gimple_df
->tm_restart
= NULL
;
4906 /* Tag the blocks with a depth number so that change_scope can find
4907 the common parent easily. */
4908 set_block_levels (DECL_INITIAL (cfun
->decl
), 0);
4909 default_rtl_profile ();
4911 timevar_pop (TV_POST_EXPAND
);
/* Static descriptor for the "expand" pass, which lowers the function's
   GIMPLE CFG to RTL.  Consumed by the pass manager via the pass_expand
   opt_pass wrapper below.  */
const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_EXPAND, /* tv_id */
  /* Expansion requires lowered, vector-ready GIMPLE in SSA form with a
     CFG and lowered exception handling.
     NOTE(review): the extraction this was recovered from dropped one
     continuation line of this initializer (original line 4927) —
     confirm the property mask against the pristine file.  */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lvec ), /* properties_required */
  PROP_rtl, /* properties_provided */
  /* After expansion the trees and SSA web are gone; only RTL remains.  */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  /* Verify the GIMPLE/SSA invariants on entry, before they are torn down.  */
  ( TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts ), /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* opt_pass wrapper for gimple_expand_cfg: the tree-to-RTL expansion
   pass, registered with the pass manager through make_pass_expand.  */
class pass_expand : public rtl_opt_pass
{
public:
  /* Construct the pass from the static descriptor above; CTXT is the
     compiler context owning this pass instance.  */
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  /* Expand the current function's GIMPLE into RTL; the return value is
     the pass's todo flags.  */
  unsigned int execute () { return gimple_expand_cfg (); }

}; // class pass_expand
4951 make_pass_expand (gcc::context
*ctxt
)
4953 return new pass_expand (ctxt
);