/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "tree.h"
#include "stringpool.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "bitmap.h"
#include "hash-map.h"
#include "hash-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "sbitmap.h"
#include "cfgloop.h"
#include "regs.h" /* For reg_renumber.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "asan.h"
#include "tree-ssa-address.h"
#include "recog.h"
#include "output.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;
static rtx expand_debug_expr (tree);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
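
/* For illustration: a statement "x = a + b" has expression code PLUS_EXPR,
   which classifies as GIMPLE_BINARY_RHS, so the function above hands back
   build2 (PLUS_EXPR, TREE_TYPE (x), a, b); a plain copy "x = a" is
   GIMPLE_SINGLE_RHS and yields the RHS operand itself, copied if needed
   to avoid clobbering a shared tree.  */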
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
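
/* A partition is threaded through the "next" fields as a singly-linked
   list terminated by EOC.  For instance, a partition holding indices
   {2, 9, 5} with representative 2 might be chained as
   stack_vars[2].next == 9, stack_vars[9].next == 5 and
   stack_vars[5].next == EOC, with the "representative" field of all
   three entries set to 2.  */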
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;
/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-increasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
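
/* A worked example of the arithmetic above, assuming FRAME_GROWS_DOWNWARD,
   frame_phase == 0, frame_offset == -20, size == 12 and align == 8:
   new_frame_offset becomes -20 - 12 == -32, which is already 8-aligned,
   so -32 is both the returned offset and the new frame_offset.  */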
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
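
/* Note that add_stack_var_conflict records each conflict in both
   bitmaps, so testing membership of Y in A's conflict set above is
   sufficient; there is no need to also test X against B's set.  */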
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
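
/* The loop above is a classical dataflow fixpoint: the set of partitions
   active at the end of each block is kept in bb->aux and recomputed in
   reverse post-order until no block's set changes; only then does the
   final FOR_EACH_BB_FN pass rerun the transfer function with
   FOR_CONFLICT true to materialize the conflict bitmaps.  */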
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return 1;
  if (aligna > alignb)
    return -1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
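
/* As an example, three small-alignment variables of sizes 4, 16 and 16
   bytes sort as 16, 16, 4 (size decreasing); the tie between the two
   16-byte objects is broken first by alignment (decreasing) and finally
   by SSA version or DECL_UID, which keeps the order stable between
   otherwise identical runs.  */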
struct part_traits : default_hashmap_traits
{
  template<typename T>
    static bool
    is_deleted (T &e)
    { return e.m_value == reinterpret_cast<void *> (1); }

  template<typename T> static bool is_empty (T &e) { return e.m_value == NULL; }

  template<typename T>
    static void
    mark_deleted (T &e)
    { e.m_value = reinterpret_cast<T> (1); }

  template<typename T>
    static void
    mark_empty (T &e)
    { e.m_value = NULL; }
};

typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
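
/* For instance, two 8-byte locals confined to disjoint sibling scopes
   typically do not conflict, so the inner loop unions them into one
   partition and they later share a single stack slot, while a variable
   live across the whole function conflicts with both and keeps its own
   partition.  */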
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
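
/* The "offset & -offset" trick above isolates the lowest set bit of the
   offset, i.e. the largest power of two dividing it: for offset == 24 it
   yields 8, so a variable placed 24 bytes from an aligned base can be
   assumed to be 8-byte aligned.  */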
struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed order: highest offset pairs come first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
            {
              HOST_WIDE_INT prev_offset = frame_offset;
              tree repr_decl = NULL_TREE;

              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));
              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        expand_one_stack_var_at (stack_vars[j].decl,
                                 base, base_align,
                                 offset);
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
1097 to a variable to be allocated in the stack frame. */
1100 expand_one_stack_var (tree var
)
1102 HOST_WIDE_INT size
, offset
;
1103 unsigned byte_align
;
1105 size
= tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var
)));
1106 byte_align
= align_local_variable (SSAVAR (var
));
1108 /* We handle highly aligned variables in expand_stack_vars. */
1109 gcc_assert (byte_align
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
);
1111 offset
= alloc_stack_frame_space (size
, byte_align
);
1113 expand_one_stack_var_at (var
, virtual_stack_vars_rtx
,
1114 crtl
->max_used_stack_slot_alignment
, offset
);
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
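
/* In short: stack protection and Address Sanitizer defer everything,
   "large" alignment is always deferred, and otherwise the choice trades
   better frame packing (deferral) against the quadratic cost of the
   partitioning algorithm, tilted by the optimization level and the
   PARAM_MIN_SIZE_FOR_STACK_SHARING threshold.  */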
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  Similarly for
         SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        {
          expand_one_hard_reg_var (var);
          if (!DECL_HARD_REGISTER (var))
            /* Invalid register specification.  */
            expand_one_error_var (var);
        }
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8
static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
            len = max;
          else
            len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
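
/* For example, with the default --param ssp-buffer-size=8, "char buf[64]"
   classifies as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, "char buf[4]"
   as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, and a struct containing
   an int array as SPCT_HAS_AGGREGATE | SPCT_HAS_ARRAY.  */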
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
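
/* So under -fstack-protector-all or -fstack-protector-strong, plain
   character arrays land in phase 1 (nearest the guard), other arrays in
   phase 2, and everything else in phase 0; in the default mode only
   large character arrays are segregated.  */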
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}
/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}
/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}
/* Check if the current function has local referenced variables that
   have their addresses taken, contain an array, or are arrays.  */

static bool
stack_protect_decl_p ()
{
  unsigned i;
  tree var;

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    if (!is_global_var (var))
      {
        tree var_type = TREE_TYPE (var);
        if (TREE_CODE (var) == VAR_DECL
            && (TREE_CODE (var_type) == ARRAY_TYPE
                || TREE_ADDRESSABLE (var)
                || (RECORD_OR_UNION_TYPE_P (var_type)
                    && record_or_union_type_has_array_p (var_type))))
          return true;
      }
  return false;
}
/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
         !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        /* This assumes that calls to internal-only functions never
           use a return slot.  */
        if (is_gimple_call (stmt)
            && !gimple_call_internal_p (stmt)
            && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
                                  gimple_call_fndecl (stmt)))
          return true;
      }
  return false;
}
/* Expand all variables used in the function.  */

static rtx_insn *
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  vec<tree> maybe_local_decls = vNULL;
  rtx_insn *var_end_seq = NULL;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  hash_map<tree, tree> ssa_name_decls;
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      /* Assign decls to each SSA name partition, share decls for partitions
         we could have coalesced (those with the same type).  */
      if (SSA_NAME_VAR (var) == NULL_TREE)
        {
          tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
          if (!*slot)
            *slot = create_tmp_reg (TREE_TYPE (var), NULL);
          replace_ssa_name_symbol (var, *slot);
        }

      /* Always allocate space for partitions based on VAR_DECLs.  But for
         those based on PARM_DECLs or RESULT_DECLs and which matter for the
         debug info, there is no need to do so if optimization is disabled
         because all the SSA_NAMEs based on these DECLs have been coalesced
         into a single partition, which is thus assigned the canonical RTL
         location of the DECLs.  If in_lto_p, we can't rely on optimize,
         a function could be compiled with -O1 -flto first and only the
         link performed at -O0.  */
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    gen_stack_protect_signal
      = stack_protect_decl_p () || stack_protect_return_slot_p ();

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* Expand variables not associated with any block now.  Those created by
         the optimizers could be live anywhere in the function.  Those that
         could possibly have been scoped originally and detached from their
         block will have their allocation deferred so we coalesce them with
         others when optimization is enabled.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            maybe_local_decls.safe_push (var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
          || cfun->calls_alloca || has_protected_decls)
        create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls)
        create_stack_guard ();
      break;

    default:
      ;
    }

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      struct stack_vars_data data;

      data.asan_vec = vNULL;
      data.asan_decl_vec = vNULL;
      data.asan_base = NULL_RTX;
      data.asan_alignb = 0;

      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1, &data);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2, &data);
        }

      if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
        /* Phase 3, any partitions that need asan protection
           in addition to phase 1 and 2.  */
        expand_stack_vars (asan_decl_phase_3, &data);

      if (!data.asan_vec.is_empty ())
        {
          HOST_WIDE_INT prev_offset = frame_offset;
          HOST_WIDE_INT offset, sz, redzonesz;
          redzonesz = ASAN_RED_ZONE_SIZE;
          sz = data.asan_vec[0] - prev_offset;
          if (data.asan_alignb > ASAN_RED_ZONE_SIZE
              && data.asan_alignb <= 4096
              && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
            redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
                         & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
          offset
            = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
          data.asan_vec.safe_push (prev_offset);
          data.asan_vec.safe_push (offset);
          /* Leave space for alignment if STRICT_ALIGNMENT.  */
          if (STRICT_ALIGNMENT)
            alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
                                      << ASAN_SHADOW_SHIFT)
                                     / BITS_PER_UNIT, 1);

          var_end_seq
            = asan_emit_stack_protection (virtual_stack_vars_rtx,
                                          data.asan_base,
                                          data.asan_alignb,
                                          data.asan_vec.address (),
                                          data.asan_decl_vec.address (),
                                          data.asan_vec.length ());
        }

      expand_stack_vars (NULL, &data);

      data.asan_vec.release ();
      data.asan_decl_vec.release ();
    }

  fini_vars_expansion ();

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  maybe_local_decls.release ();

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }

  return var_end_seq;
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
  if (elt)
    return *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  rtx_code_label *l = gen_label_rtx ();
  lab_rtx_for_bb->put (bb, l);
  return l;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx_insn *last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx_insn *insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx_insn *last2, *last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
	 ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (SA.values
      && TREE_CODE (op0) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
      && TREE_CODE (op1) == INTEGER_CST
      && ((gimple_cond_code (stmt) == NE_EXPR
	   && integer_zerop (op1))
	  || (gimple_cond_code (stmt) == EQ_EXPR
	      && integer_onep (op1)))
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
	{
	  enum tree_code code2 = gimple_assign_rhs_code (second);
	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
	    {
	      code = code2;
	      op0 = gimple_assign_rhs1 (second);
	      op1 = gimple_assign_rhs2 (second);
	    }
	  /* If jumps are cheap turn some more codes into
	     jumpy sequences.  */
	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
	    {
	      if ((code2 == BIT_AND_EXPR
		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
		  || code2 == TRUTH_AND_EXPR)
		{
		  code = TRUTH_ANDIF_EXPR;
		  op0 = gimple_assign_rhs1 (second);
		  op1 = gimple_assign_rhs2 (second);
		}
	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
		{
		  code = TRUTH_ORIF_EXPR;
		  op0 = gimple_assign_rhs1 (second);
		  op1 = gimple_assign_rhs2 (second);
		}
	    }
	}
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  set_curr_insn_location (gimple_location (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
		true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus != UNKNOWN_LOCATION)
	set_curr_insn_location (true_edge->goto_locus);
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
		   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus != UNKNOWN_LOCATION)
	set_curr_insn_location (false_edge->goto_locus);
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
	    true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus != UNKNOWN_LOCATION)
    set_curr_insn_location (false_edge->goto_locus);
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  add_bb_to_loop (new_bb, bb->loop_father);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus != UNKNOWN_LOCATION)
    {
      set_curr_insn_location (true_edge->goto_locus);
      true_edge->goto_locus = curr_insn_location ();
    }

  return new_bb;
}
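
/* Editorial illustration (hypothetical GIMPLE, not from the sources): the
   special case at the top of expand_gimple_cond turns

       _1 = x < y;
       if (_1 != 0) goto L1; else goto L2;

   into the equivalent of

       if (x < y) goto L1; else goto L2;

   when TER recorded _1 in SA.values, so a single compare-and-branch is
   emitted instead of a comparison producing a boolean followed by a
   second compare of that boolean against zero.  */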
/* Mark all calls that can have a transaction restart.  */

static void
mark_transaction_restart_calls (gimple stmt)
{
  struct tm_restart_node dummy;
  void **slot;

  if (!cfun->gimple_df->tm_restart)
    return;

  dummy.stmt = stmt;
  slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
  if (slot)
    {
      struct tm_restart_node *n = (struct tm_restart_node *) *slot;
      tree list = n->label_or_list;
      rtx_insn *insn;

      for (insn = next_real_insn (get_last_insn ());
	   !CALL_P (insn);
	   insn = next_real_insn (insn))
	continue;

      if (TREE_CODE (list) == LABEL_DECL)
	add_reg_note (insn, REG_TM, label_rtx (list));
      else
	for (; list ; list = TREE_CHAIN (list))
	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    }
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp, decl, lhs;
  bool builtin_p;
  size_t i;

  if (gimple_call_internal_p (stmt))
    {
      expand_internal_call (stmt);
      return;
    }

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  /* If this is not a builtin function, the function type through which the
     call is made may be different from the type of the function.  */
  if (!builtin_p)
    CALL_EXPR_FN (exp)
      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
		      CALL_EXPR_FN (exp));

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER addresses into arguments of builtin functions so we have a
	 chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
	  && TREE_CODE (arg) == SSA_NAME
	  && (def = get_gimple_for_ssa_name (arg))
	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
	arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  if (decl
      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
  else
    CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));

  /* Ensure RTL is created for debug args.  */
  if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
    {
      vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
      if (debug_args)
	{
	  unsigned int ix;
	  tree dtemp;

	  for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
	    {
	      gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
	      expand_debug_expr (dtemp);
	    }
	}
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  mark_transaction_restart_calls (stmt);
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
				ggc_strdup (TREE_STRING_POINTER (string)),
				locus);

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (tree outputs, tree inputs)
{
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives
	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in %<asm%>");
	  return false;
	}

      tmp = outputs;
      while (tmp)
	{
	  const char *constraint
	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));

	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for %<asm%> differ "
		     "in number of alternatives");
	      return false;
	    }

	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  return true;
}
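
/* Editorial example (hypothetical asm, not from the sources): in

       asm ("..." : "=r,m" (out) : "r,m" (in));

   each constraint string contains one comma, so both operands have two
   alternatives and the check above succeeds.  Pairing "=r,m" with a
   plain "r" input would be rejected, because the comma counts of the
   constraint strings would disagree.  */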
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit an error and return TRUE for
   overlap, FALSE for ok.  */

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qE conflicts with asm clobber list",
	     DECL_NAME (overlap));

      /* Reset registerness to stop multiple errors emitted for a single
	 variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
   should be the fallthru basic block of the asm goto.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

static void
expand_asm_operands (tree string, tree outputs, tree inputs,
		     tree clobbers, tree labels, basic_block fallthru_bb,
		     int vol, location_t locus)
{
  rtvec argvec, constraintvec, labelvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nlabels = list_length (labels);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
  int *inout_opnum = XALLOCAVEC (int, noutputs);
  rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
  enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
  const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
  int old_generating_concat_p = generating_concat_p;
  rtx_code_label *fallthru_label = NULL;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  string = resolve_asm_operand_names (string, outputs, inputs, labels);

  /* Collect constraints.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname;
      int nregs;

      if (TREE_VALUE (tail) == error_mark_node)
	return;
      regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name_and_count (regname, &nregs);
      if (i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name %qs in %<asm%>", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
	{
	  int reg;

	  for (reg = i; reg < i + nregs; reg++)
	    {
	      ++nclobbers;

	      /* Clobbering the PIC register is an error.  */
	      if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
		{
		  error ("PIC register clobbered by %qs in %<asm%>", regname);
		  return;
		}

	      SET_HARD_REG_BIT (clobbered_regs, reg);
	    }
	}
    }
  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
	return;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && REG_P (DECL_RTL (val))
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	mark_addressable (val);

      if (is_inout)
	ninout++;
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
	 would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (tail));
    }
  /* Second pass evaluates arguments.  */

  /* Make sure stack is consistent for asm goto.  */
  if (nlabels > 0)
    do_pending_stack_adjust ();

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;
      bool ok;

      ok = parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout);
      gcc_assert (ok);

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || REG_P (DECL_RTL (val)))
	      && ! (REG_P (DECL_RTL (val))
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  op = expand_expr (val, NULL_RTX, VOIDmode,
			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
	  if (MEM_P (op))
	    op = validize_mem (op);

	  if (! allows_reg && !MEM_P (op))
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && MEM_P (op))
	      || GET_CODE (op) == CONCAT)
	    {
	      real_output_rtx[i] = op;
	      op = gen_reg_rtx (GET_MODE (op));
	      if (is_inout)
		emit_move_insn (op, real_output_rtx[i]);
	    }
	}
      else
	{
	  op = assign_temp (type, 0, 1);
	  op = validize_mem (op);
	  if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
	  TREE_VALUE (tail) = make_tree (type, op);
	}
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (type);
	  inout_opnum[ninout++] = i;
	}

      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
	clobber_conflict_found = 1;
    }
  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  argvec = rtvec_alloc (ninputs);
  constraintvec = rtvec_alloc (ninputs);
  labelvec = rtvec_alloc (nlabels);

  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
				: GET_MODE (output_rtx[0])),
			       ggc_strdup (TREE_STRING_POINTER (string)),
			       empty_string, 0, argvec, constraintvec,
			       labelvec, locus);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;
      tree val, type;
      rtx op;
      bool ok;

      constraint = constraints[i + noutputs];
      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				   constraints, &allows_mem, &allows_reg);
      gcc_assert (ok);

      generating_concat_p = 0;

      val = TREE_VALUE (tail);
      type = TREE_TYPE (val);
      /* EXPAND_INITIALIZER will not generate code for valid initializer
	 constants, but will still generate code for other types of operand.
	 This is the behavior we want for constant constraints.  */
      op = expand_expr (val, NULL_RTX, VOIDmode,
			allows_reg ? EXPAND_NORMAL
			: allows_mem ? EXPAND_MEMORY
			: EXPAND_INITIALIZER);

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
	op = force_reg (GET_MODE (op), op);
      else if (MEM_P (op))
	op = validize_mem (op);

      if (asm_operand_ok (op, constraint, NULL) <= 0)
	{
	  if (allows_reg && TYPE_MODE (type) != BLKmode)
	    op = force_reg (TYPE_MODE (type), op);
	  else if (!allows_mem)
	    warning (0, "asm operand %d probably doesn%'t match constraints",
		     i + noutputs);
	  else if (MEM_P (op))
	    {
	      /* We won't recognize either volatile memory or memory
		 with a queued address as available a memory_operand
		 at this point.  Ignore it: clearly this *is* a memory.  */
	    }
	  else
	    gcc_unreachable ();
	}

      generating_concat_p = old_generating_concat_p;
      ASM_OPERANDS_INPUT (body, i) = op;

      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
	= gen_rtx_ASM_INPUT_loc (TYPE_MODE (type),
				 ggc_strdup (constraints[i + noutputs]),
				 locus);

      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
	clobber_conflict_found = 1;
    }
  /* Protect all the operands from the queue now that they have all been
     evaluated.  */

  generating_concat_p = 0;

  /* For in-out operands, copy output rtx to input rtx.  */
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      char buffer[16];

      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
	= output_rtx[j];

      sprintf (buffer, "%d", j);
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
	= gen_rtx_ASM_INPUT_loc (inout_mode[i], ggc_strdup (buffer), locus);
    }

  /* Copy labels to the vector.  */
  for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
    {
      rtx r;
      /* If asm goto has any labels in the fallthru basic block, use
	 a label that we emit immediately after the asm goto.  Expansion
	 may insert further instructions into the same basic block after
	 asm goto and if we don't do this, insertion of instructions on
	 the fallthru edge might misbehave.  See PR58670.  */
      if (fallthru_bb
	  && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
	{
	  if (fallthru_label == NULL_RTX)
	    fallthru_label = gen_label_rtx ();
	  r = fallthru_label;
	}
      else
	r = label_rtx (TREE_VALUE (tail));
      ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
    }

  generating_concat_p = old_generating_concat_p;
  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
		   ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (nlabels > 0 && nclobbers == 0)
    {
      gcc_assert (noutputs == 0);
      emit_jump_insn (body);
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }
  else if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
    }
  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
	num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx_SET (VOIDmode,
			   output_rtx[i],
			   gen_rtx_ASM_OPERANDS
			   (GET_MODE (output_rtx[i]),
			    ggc_strdup (TREE_STRING_POINTER (string)),
			    ggc_strdup (constraints[i]),
			    i, argvec, constraintvec, labelvec, locus));

	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int reg, nregs;
	  int j = decode_reg_name_and_count (regname, &nregs);
	  rtx clobbered_reg;

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx_CLOBBER (VOIDmode,
				       gen_rtx_MEM
				       (BLKmode,
					gen_rtx_SCRATCH (VOIDmode)));
		  continue;
		}

	      /* Ignore unknown register, error already signaled.  */
	      continue;
	    }

	  for (reg = j; reg < j + nregs; reg++)
	    {
	      /* Use QImode since that's guaranteed to clobber just
		 one reg.  */
	      clobbered_reg = gen_rtx_REG (QImode, reg);

	      /* Do sanity check for overlap between clobbers and
		 respectively input and outputs that hasn't been
		 handled.  Such overlap should have been detected and
		 reported above.  */
	      if (!clobber_conflict_found)
		{
		  int opno;

		  /* We test the old body (obody) contents to avoid
		     tripping over the under-construction body.  */
		  for (opno = 0; opno < noutputs; opno++)
		    if (reg_overlap_mentioned_p (clobbered_reg,
						 output_rtx[opno]))
		      internal_error
			("asm clobber conflict with output operand");

		  for (opno = 0; opno < ninputs - ninout; opno++)
		    if (reg_overlap_mentioned_p (clobbered_reg,
						 ASM_OPERANDS_INPUT (obody,
								     opno)))
		      internal_error
			("asm clobber conflict with input operand");
		}

	      XVECEXP (body, 0, i++)
		= gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
	    }
	}

      if (nlabels > 0)
	emit_jump_insn (body);
      else
	emit_insn (body);
    }

  if (fallthru_label)
    emit_label (fallthru_label);

  /* For any outputs that needed reloading into registers, spill them
     back to where they belong.  */
  for (i = 0; i < noutputs; ++i)
    if (real_output_rtx[i])
      emit_move_insn (real_output_rtx[i], output_rtx[i]);

  crtl->has_asm_statement = 1;
}
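
/* Editorial example (hypothetical source, not part of the original file):

       int lo, hi;
       asm volatile ("rdtsc" : "=a" (lo), "=d" (hi) : : "memory");

   arrives here with OUTPUTS a two-element TREE_LIST whose TREE_VALUEs
   are `lo' and `hi' and whose constraint strings are "=a" and "=d",
   INPUTS empty, and CLOBBERS holding the STRING_CST "memory".  The
   "memory" clobber is the decode_reg_name_and_count == -4 case above
   and becomes (clobber (mem:BLK (scratch))) in the emitted PARALLEL.  */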
static void
expand_asm_stmt (gimple stmt)
{
  int noutputs;
  tree outputs, tail, t;
  tree *o;
  size_t i, n;
  const char *s;
  tree str, out, in, cl, labels;
  location_t locus = gimple_location (stmt);
  basic_block fallthru_bb = NULL;

  /* Meh... convert the gimple asm operands into real tree lists.
     Eventually we should make all routines work on the vectors instead
     of relying on TREE_CHAIN.  */
  out = NULL_TREE;
  n = gimple_asm_noutputs (stmt);
  if (n > 0)
    {
      t = out = gimple_asm_output_op (stmt, 0);
      for (i = 1; i < n; i++)
	t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
    }

  in = NULL_TREE;
  n = gimple_asm_ninputs (stmt);
  if (n > 0)
    {
      t = in = gimple_asm_input_op (stmt, 0);
      for (i = 1; i < n; i++)
	t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
    }

  cl = NULL_TREE;
  n = gimple_asm_nclobbers (stmt);
  if (n > 0)
    {
      t = cl = gimple_asm_clobber_op (stmt, 0);
      for (i = 1; i < n; i++)
	t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
    }

  labels = NULL_TREE;
  n = gimple_asm_nlabels (stmt);
  if (n > 0)
    {
      edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
      if (fallthru)
	fallthru_bb = fallthru->dest;
      t = labels = gimple_asm_label_op (stmt, 0);
      for (i = 1; i < n; i++)
	t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
    }

  s = gimple_asm_string (stmt);
  str = build_string (strlen (s), s);

  if (gimple_asm_input_p (stmt))
    {
      expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
      return;
    }

  outputs = out;
  noutputs = gimple_asm_noutputs (stmt);
  /* o[I] is the place that output number I should be written.  */
  o = (tree *) alloca (noutputs * sizeof (tree));

  /* Record the contents of OUTPUTS before it is modified.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    o[i] = TREE_VALUE (tail);

  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
     OUTPUTS some trees for where the values were actually stored.  */
  expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
		       gimple_asm_volatile_p (stmt), locus);

  /* Copy all the intermediate outputs into the specified outputs.  */
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      if (o[i] != TREE_VALUE (tail))
	{
	  expand_assignment (o[i], TREE_VALUE (tail), false);
	  free_temp_slots ();

	  /* Restore the original value so that it's correct the next
	     time we expand this function.  */
	  TREE_VALUE (tail) = o[i];
	}
    }
}
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

static void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  x = convert_memory_address (Pmode, x);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

static void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  emit_jump (label_rtx (label));
}
/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}

/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location unless it's already there.  */

  tree decl = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl);
  if (return_reg != val)
    {
      tree funtype = TREE_TYPE (current_function_decl);
      tree type = TREE_TYPE (decl);
      int unsignedp = TYPE_UNSIGNED (type);
      enum machine_mode old_mode = DECL_MODE (decl);
      enum machine_mode mode;
      if (DECL_BY_REFERENCE (decl))
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
      else
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);

      if (mode != old_mode)
	val = convert_modes (mode, old_mode, val, unsignedp);

      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

static void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
	    || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  */

  else if (retval_rhs != 0
	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
	   && REG_P (result_rtl))
    {
      val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
      if (val)
	{
	  /* Use the mode of the result value on the return register.  */
	  PUT_MODE (result_rtl, GET_MODE (val));
	  expand_value_return (val);
	}
      else
	expand_null_return ();
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Compute the return value into a temporary (usually a pseudo reg).  */
      val
	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
      val = force_not_mem (val);
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_value_return (result_rtl);
    }
}
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple stmt)
{
  tree op0;

  set_curr_insn_location (gimple_location (stmt));

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
	expand_goto (op0);
      else
	expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (stmt));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_SWITCH:
      expand_case (stmt);
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (stmt);
      break;
    case GIMPLE_CALL:
      expand_call_stmt (stmt);
      break;

    case GIMPLE_RETURN:
      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)
	{
	  tree result = DECL_RESULT (current_function_decl);

	  /* If we are not returning the current function's RESULT_DECL,
	     build an assignment to it.  */
	  if (op0 != result)
	    {
	      /* I believe that a function's RESULT_DECL is unique.  */
	      gcc_assert (TREE_CODE (op0) != RESULT_DECL);

	      /* ??? We'd like to use simply expand_assignment here,
		 but this fails if the value is of BLKmode but the return
		 decl is a register.  expand_return has special handling
		 for this combination, which eventually should move
		 to common code.  See comments there.  Until then, let's
		 build a modify expression :-/  */
	      op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
			    result, op0);
	    }
	}
      if (!op0)
	expand_null_return ();
      else
	expand_return (op0);
      break;

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);

	/* Tree expand used to fiddle with |= and &= of two bitfield
	   COMPONENT_REFs here.  This can't happen with gimple, the LHS
	   of binary assigns must be a gimple reg.  */

	if (TREE_CODE (lhs) != SSA_NAME
	    || get_gimple_rhs_class (gimple_expr_code (stmt))
	       == GIMPLE_SINGLE_RHS)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);
	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
			== GIMPLE_SINGLE_RHS);
	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
	    if (TREE_CLOBBER_P (rhs))
	      /* This is a clobber to mark the going out of scope for
		 this LHS.  */
	      ;
	    else
	      expand_assignment (lhs, rhs,
				 gimple_assign_nontemporal_move_p (stmt));
	  }
	else
	  {
	    rtx target, temp;
	    bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
	    struct separate_ops ops;
	    bool promoted = false;

	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
	      promoted = true;

	    ops.code = gimple_assign_rhs_code (stmt);
	    ops.type = TREE_TYPE (lhs);
	    switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
	      {
		case GIMPLE_TERNARY_RHS:
		  ops.op2 = gimple_assign_rhs3 (stmt);
		  /* Fallthru */
		case GIMPLE_BINARY_RHS:
		  ops.op1 = gimple_assign_rhs2 (stmt);
		  /* Fallthru */
		case GIMPLE_UNARY_RHS:
		  ops.op0 = gimple_assign_rhs1 (stmt);
		  break;
		default:
		  gcc_unreachable ();
	      }
	    ops.location = gimple_location (stmt);

	    /* If we want to use a nontemporal store, force the value to
	       register first.  If we store into a promoted register,
	       don't directly expand to target.  */
	    temp = nontemporal || promoted ? NULL_RTX : target;
	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
				       EXPAND_NORMAL);

	    if (temp == target)
	      ;
	    else if (promoted)
	      {
		int unsignedp = SUBREG_PROMOTED_SIGN (target);
		/* If TEMP is a VOIDmode constant, use convert_modes to make
		   sure that we properly convert it.  */
		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
		  {
		    temp = convert_modes (GET_MODE (target),
					  TYPE_MODE (ops.type),
					  temp, unsignedp);
		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
					  GET_MODE (target), temp, unsignedp);
		  }

		if ((SUBREG_PROMOTED_GET (target) == SRP_SIGNED_AND_UNSIGNED)
		    && (GET_CODE (temp) == SUBREG)
		    && (GET_MODE (target) == GET_MODE (temp))
		    && (GET_MODE (SUBREG_REG (target))
			== GET_MODE (SUBREG_REG (temp))))
		  emit_move_insn (SUBREG_REG (target), SUBREG_REG (temp));
		else
		  convert_move (SUBREG_REG (target), temp, unsignedp);
	      }
	    else if (nontemporal && emit_storent_insn (target, temp))
	      ;
	    else
	      {
		temp = force_operand (temp, target);
		if (temp != target)
		  emit_move_insn (target, temp);
	      }
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx_insn *
expand_gimple_stmt (gimple stmt)
{
  location_t saved_location = input_location;
  rtx_insn *last = get_last_insn ();
  int lp_nr;

  gcc_assert (cfun);

  /* We need to save and restore the current source location so that errors
     discovered during expansion are emitted with the right location.  But
     it would be better if the diagnostic routines used the source location
     embedded in the tree nodes rather than globals.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  expand_gimple_stmt_1 (stmt);

  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx_insn *insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && insn_could_throw_p (insn))
	    make_reg_eh_region_note (insn, 0, lp_nr);
	}
    }

  return last;
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
{
  rtx_insn *last2, *last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = expand_gimple_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	{
	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      e->dest->count -= e->count;
	      e->dest->frequency -= EDGE_FREQUENCY (e);
	      if (e->dest->count < 0)
		e->dest->count = 0;
	      if (e->dest->frequency < 0)
		e->dest->frequency = 0;
	    }
	  count += e->count;
	  probability += e->probability;
	  remove_edge (e);
	}
      else
	ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance an sqrt builtin expander expands if with
	 sibcall in the then and label for `else`.  */
      if (LABEL_P (NEXT_INSN (last)))
	{
	  *can_fallthru = true;
	  break;
	}
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
		 | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
	BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  return bb;
}
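
/* Editorial sketch (not in the original sources) of the insn stream the
   function above scans for:

       (call_insn ...)  with SIBLING_CALL_P set   <- found by the CALL_P loop
       (barrier)                                  <- asserted above
       (code_label ...)                           <- only for a conditional
       ... normal-result computation ...             tail call such as the
						     __builtin_sqrt expansion

   Insns past the barrier are deleted unless a label follows it, in which
   case *CAN_FALLTHRU is set and the remainder becomes a new basic block.  */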
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
	? (op1 / mod > 0 ? 1 : -1)
	: 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
		       gen_rtx_MINUS (mode,
				      gen_rtx_ABS (mode, op1),
				      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
		       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
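
/* Editorial worked examples (not in the original sources) for the
   adjustments above, assuming signed/unsigned integer arithmetic:

     floor:  -7 / 2 truncates to -3 with remainder -1; the remainder is
	     nonzero and op1 / mod == 2 / -1 < 0, so the adjustment is -1
	     and -3 + -1 == -4 == floor(-3.5).
     ceil:   7u / 2u truncates to 3 with remainder 1; any nonzero
	     remainder yields +1, and 3 + 1 == 4 == ceil(3.5).
     round:  7 / 2 truncates to 3 with remainder 1; abs(1) >= abs(2) -
	     abs(1) and op1 / mod == 2 > 0, so the adjustment is +1 and
	     the halfway case 3.5 rounds away from zero to 4.  */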
/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
   any rtl.  */

static rtx
convert_debug_memory_address (enum machine_mode mode, rtx x,
			      addr_space_t as)
{
  enum machine_mode xmode = GET_MODE (x);

#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (mode == Pmode
	      || mode == targetm.addr_space.address_mode (as));
  gcc_assert (xmode == mode || xmode == VOIDmode);
#else
  rtx temp;

  gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));

  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
    return x;

  if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
    x = simplify_gen_subreg (mode, x, xmode,
			     subreg_lowpart_offset
			     (mode, xmode));
  else if (POINTERS_EXTEND_UNSIGNED > 0)
    x = gen_rtx_ZERO_EXTEND (mode, x);
  else if (!POINTERS_EXTEND_UNSIGNED)
    x = gen_rtx_SIGN_EXTEND (mode, x);
  else
    {
      switch (GET_CODE (x))
	{
	case SUBREG:
	  if ((SUBREG_PROMOTED_VAR_P (x)
	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
	       || (GET_CODE (SUBREG_REG (x)) == PLUS
		   && REG_P (XEXP (SUBREG_REG (x), 0))
		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
	      && GET_MODE (SUBREG_REG (x)) == mode)
	    return SUBREG_REG (x);
	  break;
	case LABEL_REF:
	  temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
	  return temp;
	case SYMBOL_REF:
	  temp = shallow_copy_rtx (x);
	  PUT_MODE (temp, mode);
	  return temp;
	case CONST:
	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
	  if (temp)
	    temp = gen_rtx_CONST (mode, temp);
	  return temp;
	case PLUS:
	case MINUS:
	  if (CONST_INT_P (XEXP (x, 1)))
	    {
	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
	      if (temp)
		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
	    }
	  break;
	default:
	  break;
	}
      /* Don't know how to express ptr_extend as operation in debug info.  */
      return NULL;
    }
#endif /* POINTERS_EXTEND_UNSIGNED */

  return x;
}
/* Return an RTX equivalent to the value of the parameter DECL.  */

static rtx
expand_debug_parm_decl (tree decl)
{
  rtx incoming = DECL_INCOMING_RTL (decl);

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
	  || (MEM_P (incoming)
	      && REG_P (XEXP (incoming, 0))
	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
    {
      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));

#ifdef HAVE_window_save
      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
	 If the target machine has an explicit window save instruction, the
	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
      if (REG_P (incoming)
	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
	incoming
	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
				OUTGOING_REGNO (REGNO (incoming)), 0);
      else if (MEM_P (incoming))
	{
	  rtx reg = XEXP (incoming, 0);
	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
	    {
	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
	      incoming = replace_equiv_address_nv (incoming, reg);
	    }
	  else
	    incoming = copy_rtx (incoming);
	}
#endif

      ENTRY_VALUE_EXP (rtl) = incoming;
      return rtl;
    }

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && !TREE_ADDRESSABLE (decl)
      && MEM_P (incoming)
      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    return copy_rtx (incoming);

  return NULL_RTX;
}
/* Return an RTX equivalent to the value of the tree expression EXP.  */

static rtx
expand_debug_expr (tree exp)
{
  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  enum machine_mode inner_mode = VOIDmode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
  addr_space_t as;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_expression:
      switch (TREE_CODE (exp))
	{
	case COND_EXPR:
	case DOT_PROD_EXPR:
	case SAD_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	case FMA_EXPR:
	  goto ternary;

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  goto binary;

	case TRUTH_NOT_EXPR:
	  goto unary;

	default:
	  break;
	}
      break;

    ternary:
      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
      if (!op2)
	return NULL_RTX;
      /* Fall through.  */

    binary:
    case tcc_binary:
    case tcc_comparison:
      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
      if (!op1)
	return NULL_RTX;
      /* Fall through.  */

    unary:
    case tcc_unary:
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
	return NULL_RTX;
      break;

    case tcc_type:
    case tcc_statement:
      gcc_unreachable ();

    case tcc_constant:
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_reference:
    case tcc_vl_exp:
      break;
    }

  switch (TREE_CODE (exp))
    {
    case STRING_CST:
      if (!lookup_constant_def (exp))
	{
	  if (strlen (TREE_STRING_POINTER (exp)) + 1
	      != (size_t) TREE_STRING_LENGTH (exp))
	    return NULL_RTX;
	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
	  op0 = gen_rtx_MEM (BLKmode, op0);
	  set_mem_attributes (op0, exp, 0);
	  return op0;
	}
      /* Fall through...  */

    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
      return op0;

    case COMPLEX_CST:
      gcc_assert (COMPLEX_MODE_P (mode));
      op0 = expand_debug_expr (TREE_REALPART (exp));
      op1 = expand_debug_expr (TREE_IMAGPART (exp));
      return gen_rtx_CONCAT (mode, op0, op1);

    case DEBUG_EXPR_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      if (op0)
	return op0;

      op0 = gen_rtx_DEBUG_EXPR (mode);
      DEBUG_EXPR_TREE_DECL (op0) = exp;
      SET_DECL_RTL (exp, op0);

      return op0;
    case VAR_DECL:
    case PARM_DECL:
    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case RESULT_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      /* This decl was probably optimized away.  */
      if (!op0)
	{
	  if (TREE_CODE (exp) != VAR_DECL
	      || DECL_EXTERNAL (exp)
	      || !TREE_STATIC (exp)
	      || !DECL_NAME (exp)
	      || DECL_HARD_REGISTER (exp)
	      || DECL_IN_CONSTANT_POOL (exp)
	      || mode == VOIDmode)
	    return NULL;

	  op0 = make_decl_rtl_for_debug (exp);
	  if (!MEM_P (op0)
	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
	    return NULL;
	}
      else
	op0 = copy_rtx (op0);

      if (GET_MODE (op0) == BLKmode
	  /* If op0 is not BLKmode, but BLKmode is, adjust_mode
	     below would ICE.  While it is likely a FE bug,
	     try to be robust here.  See PR43166.  */
	  || mode == BLKmode
	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
	{
	  gcc_assert (MEM_P (op0));
	  op0 = adjust_address_nv (op0, mode, 0);
	  return op0;
	}

      /* Fall through.  */
    adjust_mode:
    case PAREN_EXPR:
    CASE_CONVERT:
      {
	inner_mode = GET_MODE (op0);

	if (mode == inner_mode)
	  return op0;

	if (inner_mode == VOIDmode)
	  {
	    if (TREE_CODE (exp) == SSA_NAME)
	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
	    else
	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	    if (mode == inner_mode)
	      return op0;
	  }

	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
	  {
	    if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
	    else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
	  }
	else if (FLOAT_MODE_P (mode))
	  {
	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
	  }
	else if (FLOAT_MODE_P (inner_mode))
	  {
	    if (unsignedp)
	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
	  }
	else if (CONSTANT_P (op0)
		 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
	  op0 = simplify_gen_subreg (mode, op0, inner_mode,
				     subreg_lowpart_offset (mode,
							    inner_mode));
	else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
		 : unsignedp)
	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
	else
	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

	return op0;
      }
    case MEM_REF:
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
	{
	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
				     TREE_OPERAND (exp, 0),
				     TREE_OPERAND (exp, 1));
	  if (newexp)
	    return expand_debug_expr (newexp);
	}
      /* FALLTHROUGH */
    case INDIRECT_REF:
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
	return NULL;

      if (TREE_CODE (exp) == MEM_REF)
	{
	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
	      || (GET_CODE (op0) == PLUS
		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
	       Instead just use get_inner_reference.  */
	    goto component_ref;

	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
	  if (!op1 || !CONST_INT_P (op1))
	    return NULL;

	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
	}

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));

      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
					  op0, as);
      if (op0 == NULL_RTX)
	return NULL;

      op0 = gen_rtx_MEM (mode, op0);
      set_mem_attributes (op0, exp, 0);
      if (TREE_CODE (exp) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
	set_mem_expr (op0, NULL_TREE);
      set_mem_addr_space (op0, as);

      return op0;

    case TARGET_MEM_REF:
      if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
	return NULL;

      op0 = expand_debug_expr
	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
      if (!op0)
	return NULL;

      if (POINTER_TYPE_P (TREE_TYPE (exp)))
	as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      else
	as = ADDR_SPACE_GENERIC;

      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
					  op0, as);
      if (op0 == NULL_RTX)
	return NULL;

      op0 = gen_rtx_MEM (mode, op0);

      set_mem_attributes (op0, exp, 0);
      set_mem_addr_space (op0, as);

      return op0;
    component_ref:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep, false);
	rtx orig_op0;

	if (bitsize == 0)
	  return NULL;

	orig_op0 = op0 = expand_debug_expr (tem);

	if (!op0)
	  return NULL;

	if (offset)
	  {
	    enum machine_mode addrmode, offmode;

	    if (!MEM_P (op0))
	      return NULL;

	    op0 = XEXP (op0, 0);
	    addrmode = GET_MODE (op0);
	    if (addrmode == VOIDmode)
	      addrmode = Pmode;

	    op1 = expand_debug_expr (offset);
	    if (!op1)
	      return NULL;

	    offmode = GET_MODE (op1);
	    if (offmode == VOIDmode)
	      offmode = TYPE_MODE (TREE_TYPE (offset));

	    if (addrmode != offmode)
	      op1 = simplify_gen_subreg (addrmode, op1, offmode,
					 subreg_lowpart_offset (addrmode,
								offmode));

	    /* Don't use offset_address here, we don't need a
	       recognizable address, and we don't want to generate
	       code.  */
	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
							  op0, op1));
	  }

	if (MEM_P (op0))
	  {
	    if (mode1 == VOIDmode)
	      /* Bitfield.  */
	      mode1 = smallest_mode_for_size (bitsize, MODE_INT);
	    if (bitpos >= BITS_PER_UNIT)
	      {
		op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos %= BITS_PER_UNIT;
	      }
	    else if (bitpos < 0)
	      {
		HOST_WIDE_INT units
		  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
		op0 = adjust_address_nv (op0, mode1, units);
		bitpos += units * BITS_PER_UNIT;
	      }
	    else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
	      op0 = adjust_address_nv (op0, mode, 0);
	    else if (GET_MODE (op0) != mode1)
	      op0 = adjust_address_nv (op0, mode1, 0);
	    else
	      op0 = copy_rtx (op0);
	    if (op0 == orig_op0)
	      op0 = shallow_copy_rtx (op0);
	    set_mem_attributes (op0, exp, 0);
	  }

	if (bitpos == 0 && mode == GET_MODE (op0))
	  return op0;

	if (bitpos < 0)
	  return NULL;

	if (GET_MODE (op0) == BLKmode)
	  return NULL;

	if ((bitpos % BITS_PER_UNIT) == 0
	    && bitsize == GET_MODE_BITSIZE (mode1))
	  {
	    enum machine_mode opmode = GET_MODE (op0);

	    if (opmode == VOIDmode)
	      opmode = TYPE_MODE (TREE_TYPE (tem));

	    /* This condition may hold if we're expanding the address
	       right past the end of an array that turned out not to
	       be addressable (i.e., the address was only computed in
	       debug stmts).  The gen_subreg below would rightfully
	       crash, and the address doesn't really exist, so just
	       drop it.  */
	    if (bitpos >= GET_MODE_BITSIZE (opmode))
	      return NULL;

	    if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
	      return simplify_gen_subreg (mode, op0, opmode,
					  bitpos / BITS_PER_UNIT);
	  }

	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
				     && TYPE_UNSIGNED (TREE_TYPE (exp))
				     ? SIGN_EXTRACT
				     : ZERO_EXTRACT, mode,
				     GET_MODE (op0) != VOIDmode
				     ? GET_MODE (op0)
				     : TYPE_MODE (TREE_TYPE (tem)),
				     op0, GEN_INT (bitsize), GEN_INT (bitpos));
      }
    case ABS_EXPR:
      return simplify_gen_unary (ABS, mode, op0, mode);

    case NEGATE_EXPR:
      return simplify_gen_unary (NEG, mode, op0, mode);

    case BIT_NOT_EXPR:
      return simplify_gen_unary (NOT, mode, op0, mode);

    case FLOAT_EXPR:
      return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									 0)))
				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
				 inner_mode);

    case FIX_TRUNC_EXPR:
      return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
				 inner_mode);

    case POINTER_PLUS_EXPR:
      /* For the rare target where pointers are not the same size as
	 size_t, we need to check for mis-matched modes and correct
	 the addend.  */
      if (op0 && op1
	  && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
	  && GET_MODE (op0) != GET_MODE (op1))
	{
	  if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
	      /* If OP0 is a partial mode, then we must truncate, even if it has
		 the same bitsize as OP1 as GCC's representation of partial modes
		 is opaque.  */
	      || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
		  && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
	    op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
				      GET_MODE (op1));
	  else
	    /* We always sign-extend, regardless of the signedness of
	       the operand, because the operand is always unsigned
	       here even if the original C expression is signed.  */
	    op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
				      GET_MODE (op1));
	}
      /* Fall through.  */
    case PLUS_EXPR:
      return simplify_gen_binary (PLUS, mode, op0, op1);
4182 return simplify_gen_binary (MINUS
, mode
, op0
, op1
);
4185 return simplify_gen_binary (MULT
, mode
, op0
, op1
);
4188 case TRUNC_DIV_EXPR
:
4189 case EXACT_DIV_EXPR
:
4191 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4193 return simplify_gen_binary (DIV
, mode
, op0
, op1
);
4195 case TRUNC_MOD_EXPR
:
4196 return simplify_gen_binary (unsignedp
? UMOD
: MOD
, mode
, op0
, op1
);
4198 case FLOOR_DIV_EXPR
:
4200 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4203 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4204 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4205 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
4206 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4209 case FLOOR_MOD_EXPR
:
4211 return simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4214 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4215 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
4216 adj
= simplify_gen_unary (NEG
, mode
,
4217 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4219 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4225 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4226 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4227 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
4228 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4232 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4233 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4234 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
4235 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4241 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4242 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
4243 adj
= simplify_gen_unary (NEG
, mode
,
4244 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4246 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4250 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4251 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
4252 adj
= simplify_gen_unary (NEG
, mode
,
4253 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4255 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
    case ROUND_DIV_EXPR:
      if (unsignedp)
	{
	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = round_udiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}
      else
	{
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = round_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}

    case ROUND_MOD_EXPR:
      if (unsignedp)
	{
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = round_udiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}
      else
	{
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = round_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}
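      /* Illustrative example (not part of the original pass) of the round
	 adjustment, assuming round_[su]div_adjust yields +/-1 whenever
	 2*|mod| >= |op1|, i.e. the discarded fraction is at least 1/2.
	 Guarded out; documentation only.  */
#if 0
      static int
      round_div_sketch (int op0, int op1)
      {
	int div = op0 / op1, mod = op0 % op1;
	int adj = 0;
	if (2 * (mod < 0 ? -mod : mod) >= (op1 < 0 ? -op1 : op1))
	  adj = ((op0 < 0) == (op1 < 0)) ? 1 : -1;
	return div + adj;	/* 7/2 -> 4, -7/2 -> -4, 5/3 -> 2 */
      }
#endif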
    case LSHIFT_EXPR:
      return simplify_gen_binary (ASHIFT, mode, op0, op1);

    case RSHIFT_EXPR:
      if (unsignedp)
	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
      else
	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);

    case LROTATE_EXPR:
      return simplify_gen_binary (ROTATE, mode, op0, op1);

    case RROTATE_EXPR:
      return simplify_gen_binary (ROTATERT, mode, op0, op1);

    case MIN_EXPR:
      return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);

    case MAX_EXPR:
      return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
    case BIT_AND_EXPR:
    case TRUTH_AND_EXPR:
      return simplify_gen_binary (AND, mode, op0, op1);

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      return simplify_gen_binary (IOR, mode, op0, op1);

    case BIT_XOR_EXPR:
    case TRUTH_XOR_EXPR:
      return simplify_gen_binary (XOR, mode, op0, op1);

    case TRUTH_ANDIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);

    case TRUTH_ORIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);

    case TRUTH_NOT_EXPR:
      return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
    case LT_EXPR:
      return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
				      op0, op1);

    case LE_EXPR:
      return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
				      op0, op1);

    case GT_EXPR:
      return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
				      op0, op1);

    case GE_EXPR:
      return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
				      op0, op1);

    case EQ_EXPR:
      return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);

    case NE_EXPR:
      return simplify_gen_relational (NE, mode, inner_mode, op0, op1);

    case UNORDERED_EXPR:
      return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);

    case ORDERED_EXPR:
      return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);

    case UNLT_EXPR:
      return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);

    case UNLE_EXPR:
      return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);

    case UNGT_EXPR:
      return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);

    case UNGE_EXPR:
      return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);

    case UNEQ_EXPR:
      return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);

    case LTGT_EXPR:
      return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);

    case COND_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
    case COMPLEX_EXPR:
      gcc_assert (COMPLEX_MODE_P (mode));
      if (GET_MODE (op0) == VOIDmode)
	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
      if (GET_MODE (op1) == VOIDmode)
	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
      return gen_rtx_CONCAT (mode, op0, op1);
    case CONJ_EXPR:
      if (GET_CODE (op0) == CONCAT)
	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
						   XEXP (op0, 1),
						   GET_MODE_INNER (mode)));
      else
	{
	  enum machine_mode imode = GET_MODE_INNER (mode);
	  rtx re, im;

	  if (MEM_P (op0))
	    {
	      re = adjust_address_nv (op0, imode, 0);
	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
	    }
	  else
	    {
	      enum machine_mode ifmode = int_mode_for_mode (mode);
	      enum machine_mode ihmode = int_mode_for_mode (imode);
	      rtx halfsize;
	      if (ifmode == BLKmode || ihmode == BLKmode)
		return NULL;
	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
	      re = op0;
	      if (mode != ifmode)
		re = gen_rtx_SUBREG (ifmode, re, 0);
	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
	      if (imode != ihmode)
		re = gen_rtx_SUBREG (imode, re, 0);
	      im = copy_rtx (op0);
	      if (mode != ifmode)
		im = gen_rtx_SUBREG (ifmode, im, 0);
	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
	      if (imode != ihmode)
		im = gen_rtx_SUBREG (imode, im, 0);
	    }
	  im = gen_rtx_NEG (imode, im);
	  return gen_rtx_CONCAT (mode, re, im);
	}
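      /* Illustrative sketch (not part of the original source) of the
	 register path above for an SCmode (complex float) value, where
	 imode == SFmode, ifmode == DImode and ihmode == SImode: roughly

	     re = (subreg:SF (zero_extract:SI (subreg:DI x 0)
					      (const_int 32) (const_int 0)) 0)
	     im = (subreg:SF (zero_extract:SI (subreg:DI x 0)
					      (const_int 32) (const_int 32)) 0)

	 i.e. each half is carved out of an integer view of the value with
	 ZERO_EXTRACT, and only the imaginary half is wrapped in a NEG to
	 form the conjugate (concat re (neg im)).  */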
    case ADDR_EXPR:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0 || !MEM_P (op0))
	{
	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
	      && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
		  || target_for_debug_bind (TREE_OPERAND (exp, 0))))
	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));

	  if (handled_component_p (TREE_OPERAND (exp, 0)))
	    {
	      HOST_WIDE_INT bitoffset, bitsize, maxsize;
	      tree decl
		= get_ref_base_and_extent (TREE_OPERAND (exp, 0),
					   &bitoffset, &bitsize, &maxsize);
	      if ((TREE_CODE (decl) == VAR_DECL
		   || TREE_CODE (decl) == PARM_DECL
		   || TREE_CODE (decl) == RESULT_DECL)
		  && (!TREE_ADDRESSABLE (decl)
		      || target_for_debug_bind (decl))
		  && (bitoffset % BITS_PER_UNIT) == 0
		  && bitsize > 0
		  && bitsize == maxsize)
		{
		  rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
		  return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
		}
	    }

	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
		 == ADDR_EXPR)
	    {
	      op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0));
	      if (op0 != NULL
		  && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
		      || (GET_CODE (op0) == PLUS
			  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
			  && CONST_INT_P (XEXP (op0, 1)))))
		{
		  op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
							 1));
		  if (!op1 || !CONST_INT_P (op1))
		    return NULL;

		  return plus_constant (mode, op0, INTVAL (op1));
		}
	    }

	  return NULL;
	}

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);

      return op0;
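      /* Illustrative example (not part of the original source): for a
	 debug bind of &s.f, where the aggregate s was optimized away from
	 memory and s.f sits at byte offset 4, the code above yields

	     (plus:P (debug_implicit_ptr:P s) (const_int 4))

	 which the debug-info machinery can resolve later even though s has
	 no storage.  The bitoffset % BITS_PER_UNIT == 0 and
	 bitsize == maxsize checks ensure the component really is an
	 aligned, fully-covered piece of the decl.  */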
    case VECTOR_CST:
      {
	unsigned i;

	op0 = gen_rtx_CONCATN
	  (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));

	for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
	  {
	    op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
	    if (!op1)
	      return NULL;
	    XVECEXP (op0, 0, i) = op1;
	  }

	return op0;
      }
    case CONSTRUCTOR:
      if (TREE_CLOBBER_P (exp))
	return NULL;
      else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
	{
	  unsigned i;
	  tree val;

	  op0 = gen_rtx_CONCATN
	    (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
	    {
	      op1 = expand_debug_expr (val);
	      if (!op1)
		return NULL;
	      XVECEXP (op0, 0, i) = op1;
	    }

	  if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
	    {
	      op1 = expand_debug_expr
		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));

	      if (!op1)
		return NULL;

	      for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
		XVECEXP (op0, 0, i) = op1;
	    }

	  return op0;
	}
      else
	goto flag_unsupported;

    case CALL_EXPR:
      /* ??? Maybe handle some builtins?  */
      return NULL;
    case SSA_NAME:
      {
	gimple g = get_gimple_for_ssa_name (exp);
	if (g)
	  {
	    op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
	    if (!op0)
	      return NULL;
	  }
	else
	  {
	    int part = var_to_partition (SA.map, exp);

	    if (part == NO_PARTITION)
	      {
		/* If this is a reference to an incoming value of parameter
		   that is never used in the code or where the incoming
		   value is never used in the code, use PARM_DECL's
		   DECL_RTL if set.  */
		if (SSA_NAME_IS_DEFAULT_DEF (exp)
		    && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
		  {
		    op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
		    if (op0)
		      goto adjust_mode;
		    op0 = expand_debug_expr (SSA_NAME_VAR (exp));
		    if (op0)
		      goto adjust_mode;
		  }

		return NULL;
	      }

	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);

	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
	  }

	goto adjust_mode;
      }
      /* Vector stuff.  For most of the codes we don't have rtl codes.  */
    case REALIGN_LOAD_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_COND_EXPR:
    case VEC_LSHIFT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_RSHIFT_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
    case VEC_PERM_EXPR:
      return NULL;

      /* Misc codes.  */
    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case OBJ_TYPE_REF:
    case WITH_SIZE_EXPR:
      return NULL;
    case DOT_PROD_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  op0
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
				  inner_mode);
	  op1
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  1)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
				  inner_mode);
	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
	  return simplify_gen_binary (PLUS, mode, op0, op2);
	}
      return NULL;
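      /* Illustrative arithmetic (not part of the original pass) for the
	 widening multiply-accumulate above.  Extending before the multiply
	 is what keeps the product exact; a narrow multiply of the raw bits
	 would wrap first.  Guarded out; documentation only.  */
#if 0
      static int
      dot_prod_sketch (signed char a, signed char b, int acc)
      {
	/* a = -3, b = 5, acc = 100: extend to int, MULT -> -15,
	   PLUS acc -> 85.  */
	return (int) a * (int) b + acc;
      }
#endif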
    case WIDEN_MULT_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  inner_mode = GET_MODE (op0);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
	  else
	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
	  else
	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
	    return op0;
	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
	    return simplify_gen_binary (PLUS, mode, op0, op2);
	  else
	    return simplify_gen_binary (MINUS, mode, op2, op0);
	}
      return NULL;
    case MULT_HIGHPART_EXPR:
      /* ??? Similar to the above.  */
      return NULL;

    case WIDEN_SUM_EXPR:
    case WIDEN_LSHIFT_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  op0
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
				  inner_mode);
	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
				      ? ASHIFT : PLUS, mode, op0, op1);
	}
      return NULL;

    case FMA_EXPR:
      return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);

    default:
    flag_unsupported:
#ifdef ENABLE_CHECKING
      debug_tree (exp);
      gcc_unreachable ();
#else
      return NULL;
#endif
    }
}
/* Return an RTX equivalent to the source bind value of the tree expression
   EXP.  */

static rtx
expand_debug_source_expr (tree exp)
{
  rtx op0 = NULL_RTX;
  enum machine_mode mode = VOIDmode, inner_mode;

  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
      {
	mode = DECL_MODE (exp);
	op0 = expand_debug_parm_decl (exp);
	if (op0)
	  break;
	/* See if this isn't an argument that has been completely
	   optimized out.  */
	if (!DECL_RTL_SET_P (exp)
	    && !DECL_INCOMING_RTL (exp)
	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
	  {
	    tree aexp = DECL_ORIGIN (exp);
	    if (DECL_CONTEXT (aexp)
		== DECL_ABSTRACT_ORIGIN (current_function_decl))
	      {
		vec<tree, va_gc> **debug_args;
		unsigned int ix;
		tree ddecl;
		debug_args = decl_debug_args_lookup (current_function_decl);
		if (debug_args != NULL)
		  {
		    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
			 ix += 2)
		      if (ddecl == aexp)
			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
		  }
	      }
	  }
	break;
      }
    default:
      break;
    }

  if (op0 == NULL_RTX)
    return NULL_RTX;

  inner_mode = GET_MODE (op0);
  if (mode == inner_mode)
    return op0;

  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
    {
      if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
      else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
      else
	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
    }
  else if (FLOAT_MODE_P (mode))
    gcc_unreachable ();
  else if (FLOAT_MODE_P (inner_mode))
    {
      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
      else
	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
    }
  else if (CONSTANT_P (op0)
	   || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
    op0 = simplify_gen_subreg (mode, op0, inner_mode,
			       subreg_lowpart_offset (mode, inner_mode));
  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
  else
    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

  return op0;
}
/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */

static void
avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
{
  rtx exp = *exp_p;

  if (exp == NULL_RTX)
    return;

  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
    return;

  if (depth == 4)
    {
      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
      rtx dval = make_debug_expr_from_rtl (exp);

      /* Emit a debug bind insn before INSN.  */
      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
				       DEBUG_EXPR_TREE_DECL (dval), exp,
				       VAR_INIT_STATUS_INITIALIZED);

      emit_debug_insn_before (bind, insn);
      *exp_p = dval;
      return;
    }

  const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
  int i, j;
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
	break;

      case 'E':
      case 'V':
	for (j = 0; j < XVECLEN (exp, i); j++)
	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
	break;

      default:
	break;
      }
}
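/* Illustrative example (not part of the original source): starting at
   depth 0 on a location such as

       (plus (mult (plus (mult (plus (reg A) (reg B)) ...) ...) ...) ...)

   the innermost (plus (reg A) (reg B)) is reached at depth 4, so it is
   replaced by a fresh DEBUG_EXPR whose value is bound by a new debug insn
   emitted just before INSN; the outer location then refers to that
   DEBUG_EXPR, keeping each individual location expression shallow.  */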
/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though it doesn't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_INSN_P (insn))
      {
	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
	rtx val;
	rtx_insn *prev_insn, *insn2;
	enum machine_mode mode;

	if (value == NULL_TREE)
	  val = NULL_RTX;
	else
	  {
	    if (INSN_VAR_LOCATION_STATUS (insn)
		== VAR_INIT_STATUS_UNINITIALIZED)
	      val = expand_debug_source_expr (value);
	    else
	      val = expand_debug_expr (value);
	    gcc_assert (last == get_last_insn ());
	  }

	if (!val)
	  val = gen_rtx_UNKNOWN_VAR_LOC ();
	else
	  {
	    mode = GET_MODE (INSN_VAR_LOCATION (insn));

	    gcc_assert (mode == GET_MODE (val)
			|| (GET_MODE (val) == VOIDmode
			    && (CONST_SCALAR_INT_P (val)
				|| GET_CODE (val) == CONST_FIXED
				|| GET_CODE (val) == LABEL_REF)));
	  }

	INSN_VAR_LOCATION_LOC (insn) = val;
	prev_insn = PREV_INSN (insn);
	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
      }

  flag_strict_aliasing = save_strict_alias;
}
/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple stmt = NULL;
  rtx_note *note;
  rtx_insn *last;
  edge e;
  edge_iterator ei;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
	     bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  stmts = bb_seq (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      gimple ret_stmt = gsi_stmt (gsi);

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));

      if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && !gimple_return_retval (ret_stmt))
	{
	  gsi_remove (&gsi, false);
	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
	}
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	stmt = NULL;
    }

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
	{
	  expand_gimple_stmt (stmt);
	  gsi_next (&gsi);
	}

      if (elt)
	emit_label (*elt);

      /* Java emits line number notes in the top of labels.
	 ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;
  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    {
      basic_block new_bb;

      stmt = gsi_stmt (gsi);

      /* If this statement is a non-debug one, and we generate debug
	 insns, then this one might be the last real use of a TERed
	 SSA_NAME, but where there are still some debug uses further
	 down.  Expanding the current SSA name in such further debug
	 uses by their RHS might lead to wrong debug info, as coalescing
	 might make the operands of such RHS be placed into the same
	 pseudo as something else.  Like so:
	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => a_1
	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
	 If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
	 the write to a_2 would actually have clobbered the place which
	 formerly held a_0.

	 So, instead of that, we recognize the situation, and generate
	 debug temporaries at the last real use of TERed SSA names:
	   a_1 = a_0 + 1;
	   #DEBUG #D1 => a_1
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => #D1  */
      if (MAY_HAVE_DEBUG_INSNS
	  && SA.values
	  && !is_gimple_debug (stmt))
	{
	  ssa_op_iter iter;
	  tree op;
	  gimple def;

	  location_t sloc = curr_insn_location ();

	  /* Look for SSA names that have their last use here (TERed
	     names always have only one real use).  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    if ((def = get_gimple_for_ssa_name (op)))
	      {
		imm_use_iterator imm_iter;
		use_operand_p use_p;
		bool have_debug_uses = false;

		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
		  {
		    if (gimple_debug_bind_p (USE_STMT (use_p)))
		      {
			have_debug_uses = true;
			break;
		      }
		  }

		if (have_debug_uses)
		  {
		    /* OP is a TERed SSA name, with DEF its defining
		       statement, and where OP is used in further debug
		       instructions.  Generate a debug temporary, and
		       replace all uses of OP in debug insns with that
		       temporary.  */
		    gimple debugstmt;
		    tree value = gimple_assign_rhs_to_tree (def);
		    tree vexpr = make_node (DEBUG_EXPR_DECL);
		    rtx val;
		    enum machine_mode mode;

		    set_curr_insn_location (gimple_location (def));

		    DECL_ARTIFICIAL (vexpr) = 1;
		    TREE_TYPE (vexpr) = TREE_TYPE (value);
		    if (DECL_P (value))
		      mode = DECL_MODE (value);
		    else
		      mode = TYPE_MODE (TREE_TYPE (value));
		    DECL_MODE (vexpr) = mode;

		    val = gen_rtx_VAR_LOCATION
			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

		    emit_debug_insn (val);

		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
		      {
			if (!gimple_debug_bind_p (debugstmt))
			  continue;

			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
			  SET_USE (use_p, vexpr);

			update_stmt (debugstmt);
		      }
		  }
	      }
	  set_curr_insn_location (sloc);
	}
      currently_expanding_gimple_stmt = stmt;

      /* Expand this statement, then evaluate the resulting RTL and
	 fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  new_bb = expand_gimple_cond (bb, stmt);
	  if (new_bb)
	    return new_bb;
	}
      else if (gimple_debug_bind_p (stmt))
	{
	  location_t sloc = curr_insn_location ();
	  gimple_stmt_iterator nsi = gsi;

	  for (;;)
	    {
	      tree var = gimple_debug_bind_get_var (stmt);
	      tree value;
	      rtx val;
	      enum machine_mode mode;

	      if (TREE_CODE (var) != DEBUG_EXPR_DECL
		  && TREE_CODE (var) != LABEL_DECL
		  && !target_for_debug_bind (var))
		goto delink_debug_stmt;

	      if (gimple_debug_bind_has_value_p (stmt))
		value = gimple_debug_bind_get_value (stmt);
	      else
		value = NULL_TREE;

	      last = get_last_insn ();

	      set_curr_insn_location (gimple_location (stmt));

	      if (DECL_P (var))
		mode = DECL_MODE (var);
	      else
		mode = TYPE_MODE (TREE_TYPE (var));

	      val = gen_rtx_VAR_LOCATION
		  (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

	      emit_debug_insn (val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  /* We can't dump the insn with a TREE where an RTX
		     is expected.  */
		  PAT_VAR_LOCATION_LOC (val) = const0_rtx;
		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
		  PAT_VAR_LOCATION_LOC (val) = (rtx)value;
		}

	    delink_debug_stmt:
	      /* In order not to generate too many debug temporaries,
		 we delink all uses of debug statements we already expanded.
		 Therefore debug statements between definition and real
		 use of TERed SSA names will continue to use the SSA name,
		 and not be replaced with debug temps.  */
	      delink_stmt_imm_use (stmt);

	      gsi = nsi;
	      gsi_next (&nsi);
	      if (gsi_end_p (nsi))
		break;
	      stmt = gsi_stmt (nsi);
	      if (!gimple_debug_bind_p (stmt))
		break;
	    }

	  set_curr_insn_location (sloc);
	}
      else if (gimple_debug_source_bind_p (stmt))
	{
	  location_t sloc = curr_insn_location ();
	  tree var = gimple_debug_source_bind_get_var (stmt);
	  tree value = gimple_debug_source_bind_get_value (stmt);
	  rtx val;
	  enum machine_mode mode;

	  last = get_last_insn ();

	  set_curr_insn_location (gimple_location (stmt));

	  mode = DECL_MODE (var);

	  val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
				      VAR_INIT_STATUS_UNINITIALIZED);

	  emit_debug_insn (val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      /* We can't dump the insn with a TREE where an RTX
		 is expected.  */
	      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
	      PAT_VAR_LOCATION_LOC (val) = (rtx)value;
	    }

	  set_curr_insn_location (sloc);
	}
      else
	{
	  if (is_gimple_call (stmt)
	      && gimple_call_tail_p (stmt)
	      && disable_tail_calls)
	    gimple_call_set_tail (stmt, false);

	  if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      def_operand_p def_p;
	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

	      if (def_p != NULL)
		{
		  /* Ignore this stmt if it is in the list of
		     replaceable expressions.  */
		  if (SA.values
		      && bitmap_bit_p (SA.values,
				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
		    continue;
		}
	      last = expand_gimple_stmt (stmt);
	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
	    }
	}
    }

  currently_expanding_gimple_stmt = NULL;

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus != UNKNOWN_LOCATION)
	set_curr_insn_location (e->goto_locus);
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
	{
	  emit_jump (label_rtx_for_bb (e->dest));
	  e->flags &= ~EDGE_FALLTHRU;
	}
    }

  /* Expanded RTL can create a jump in the last instruction of block.
     This might later be assumed to be a jump to the successor and break
     edge insertion.  We need to insert a dummy move to prevent this.
     PR41440.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
      && (last = get_last_insn ())
      && JUMP_P (last))
    {
      rtx dummy = gen_reg_rtx (SImode);
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}
/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);

  /* When entry edge points to first basic block, we don't need jump,
     otherwise we have to jump into proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
  init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

  update_bb_for_insn (init_block);
  return init_block;
}
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx_insn *head = get_last_insn ();
  rtx_insn *end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
  rtx_insn *orig_end = BB_END (prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move end of the last basic
     block.  */
  BB_END (prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  /* But make sure exit_block starts with RETURN_LABEL, otherwise the
     bb frequency counting will be confused.  Any instructions before that
     label are emitted for the case where PREV_BB falls through into the
     exit block, so append those instructions to prev_bb in that case.  */
  if (NEXT_INSN (head) != return_label)
    {
      while (NEXT_INSN (head) != return_label)
	{
	  if (!NOTE_P (NEXT_INSN (head)))
	    BB_END (prev_bb) = NEXT_INSN (head);
	  head = NEXT_INSN (head);
	}
    }
  exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
  exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e2 != e)
      {
	e->count -= e2->count;
	exit_block->count -= e2->count;
	exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t, 2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || CONVERT_EXPR_P (t))
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t)
	      && DECL_MODE (t) != BLKmode)
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple stmt = gsi_stmt (gsi);
	if (!is_gimple_debug (stmt))
	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}
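/* Illustrative example (not part of the original source) of what the walk
   above catches.  Guarded out; documentation only.  */
#if 0
static short
nonconstant_index_sketch (int i)
{
  short v[2] = { 1, 2 };	/* small enough to live in one register */
  /* A variable index would need a variable bit offset into that register,
     which RTL expansion cannot produce; marking v TREE_ADDRESSABLE forces
     it into stack memory, where v[i] is an ordinary indexed load.  */
  return v[i];
}
#endif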
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
	      <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
	 needed.  */
      fixup_tail_calls ();
    }
}
static void
expand_main_function (void)
{
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

#ifndef HAVE_stack_protect_set
# define HAVE_stack_protect_set		0
# define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
#endif

static void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (HAVE_stack_protect_set)
    {
      rtx insn = gen_stack_protect_set (x, y);
      if (insn)
	{
	  emit_insn (insn);
	  return;
	}
    }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
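/* Illustrative sketch (not part of the original source) of the leak the
   stack_protect_set pattern avoids.  A plain move may be split as

       (set (reg:DI tmp) (mem:DI guard))	; guard value now in a pseudo
       (set (mem:DI canary_slot) (reg:DI tmp))

   leaving the guard value live in TMP, where later spills could expose it.
   A target stack_protect_set insn performs the mem-to-mem copy while
   clobbering its scratch register in a single insn, so no register holds
   the guard afterwards.  */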
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

namespace {

const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx
    | PROP_gimple_lvec ), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_expand
unsigned int
pass_expand::execute (function *fun)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  rtx_insn *var_seq, *var_ret_seq;
  unsigned i;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);

  /* Make sure all values used by the optimization passes have sane
     defaults.  */
  reg_renumber = 0;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;
  /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
  free_dominance_info (CDI_DOMINATORS);

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));

  insn_locations_init ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
	set_curr_insn_location
	  (DECL_SOURCE_LOCATION (current_function_decl));
      else
	set_curr_insn_location (fun->function_start_locus);
    }
  else
    set_curr_insn_location (UNKNOWN_LOCATION);
  prologue_location = curr_insn_location ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  fun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI rely on
     knowledge of the function section at expansion time to predict
     distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);
  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  var_ret_seq = expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (fun->calls_alloca)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting local variables: "
		 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting function: "
		 "all local arrays are less than %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just inserted an alloca call.
	 Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }
  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
	  && !SA.partition_to_pseudo[i])
	SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
	 this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
	SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
	 was a decl marked in REG_ATTR or MEM_ATTR.  We could use
	 SET_DECL_RTL here making this available, but that would mean
	 to select one of the potentially many RTLs for one DECL.  Instead
	 of doing that we simply reset the MEM_EXPR of the RTL in question,
	 then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
	{
	  if (MEM_P (SA.partition_to_pseudo[i]))
	    set_mem_expr (SA.partition_to_pseudo[i], NULL);
	}
    }
  /* If we have a class containing differently aligned pointers
     we need to merge those into the corresponding RTL pointer
     alignment.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      int part;
      rtx r;

      if (!name
	  /* We might have generated new SSA names in
	     update_alias_info_with_stack_vars.  They will have NULL
	     defining statements, and won't be part of the partitioning,
	     so ignore those.  */
	  || !SSA_NAME_DEF_STMT (name))
	continue;
      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
	continue;

      /* Adjust all partition members to get the underlying decl of
	 the representative which we might have created in expand_one_var.  */
      if (SSA_NAME_VAR (name) == NULL_TREE)
	{
	  tree leader = partition_to_var (SA.map, part);
	  gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
	  replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
	}
      if (!POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      r = SA.partition_to_pseudo[part];
      if (REG_P (r))
	mark_reg_pointer (r, get_pointer_alignment (name));
    }
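  /* Illustrative example (not part of the original source): if SSA
     pointers p_1 (known 16-byte aligned) and p_2 (known 4-byte aligned)
     were coalesced into the same partition, the shared pseudo is marked
     via mark_reg_pointer once per name; mark_reg_pointer keeps the smaller
     of the recorded alignments, so the pseudo ends up with the
     conservative 4-byte alignment.  */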
  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
		  next_bb)
    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations ();

  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  fun->gimple_df->in_ssa_p = false;
  loops_state_clear (LOOP_CLOSED_SSA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  delete lab_rtx_for_bb;
  free_histograms ();

  construct_exit_block ();
  insn_locations_finalize ();
  if (var_ret_seq)
    {
      rtx_insn *after = return_label;
      rtx_insn *next = NEXT_INSN (after);
      if (next && NOTE_INSN_BASIC_BLOCK_P (next))
	after = next;
      emit_insn_after (var_ret_seq, after);
    }

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (fun, NULL);

  /* We need JUMP_LABEL be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->insns.r)
	    {
	      rebuild_jump_labels_chain (e->insns.r);
	      /* Put insns after parm birth, but before
		 NOTE_INSNS_FUNCTION_BEG.  */
	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
		{
		  rtx_insn *insns = e->insns.r;
		  e->insns.r = NULL;
		  if (NOTE_P (parm_birth_insn)
		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
		  else
		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
		}
	      else
		commit_one_edge_insertion (e);
	    }
	  else
	    ei_next (&ei);
	}
    }
  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }

  blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ???  We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();
  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    fprintf (dump_file,
	     "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
  /* And the pass manager will dump RTL for us.  */

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    {
      htab_delete (fun->gimple_df->tm_restart);
      fun->gimple_df->tm_restart = NULL;
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  default_rtl_profile ();

  timevar_pop (TV_POST_EXPAND);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}