/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "hard-reg-set.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "langhooks.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "plugin-api.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "regs.h" /* For reg_renumber.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "tree-ssa-address.h"
#include "tree-chkp.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#define NAME__MAIN "__main"

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The declaration.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
/* Align given offset BASE with ALIGN.  Truncate up if ALIGN_UP is true,
   down otherwise.  Return truncated BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}
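
/* For illustration (this note is an added example, not original code): with
   a power-of-two ALIGN the expression above rounds by bit masking, e.g.
   align_base (37, 16, true) computes (37 + 15) & -16 = 48, while
   align_base (37, 16, false) computes 37 & -16 = 32.  */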
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
        = align_base (frame_offset - frame_phase - size,
                      align, false) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
        = align_base (frame_offset - frame_phase, align, true) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
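
/* For illustration (added example, not original code): with frame_phase 0
   and a downward-growing frame, asking for 8 bytes at 16-byte alignment when
   frame_offset is -20 moves frame_offset to align_base (-28, 16, false) = -32
   and returns -32; with an upward-growing frame and frame_offset 20, the
   block starts at the rounded-up offset 32 and frame_offset advances to 40.  */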
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];

  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna > alignb)
    return -1;
  if (aligna < alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
struct part_traits : default_hashmap_traits
{
  template<typename T>
    static bool
    is_deleted (T &e)
    { return e.m_value == reinterpret_cast<void *> (1); }

  template<typename T> static bool is_empty (T &e) { return e.m_value == NULL; }

  template<typename T>
    static void
    mark_deleted (T &e)
    { e.m_value = reinterpret_cast<T> (1); }

  template<typename T>
    static void
    mark_empty (T &e)
    { e.m_value = NULL; }
};

typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          Look for the largest non-conflicting object B with size <= S.
          UNION (A, B)
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
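
/* For illustration (added example, not original code): the "offset & -offset"
   computation above isolates the lowest set bit of OFFSET, i.e. the largest
   power of two dividing it; e.g. an offset of 24 from the aligned base gives
   24 & -24 = 8, so the slot is known to be 8-byte aligned.  */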
struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed order: highest offset pairs come first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* All "large" alignment decls come before all "small" alignment
             decls, but "large" alignment decls are not sorted based on
             their alignment.  Increase large_align to track the largest
             required alignment.  */
          if ((alignb * BITS_PER_UNIT) > large_align)
            large_align = alignb * BITS_PER_UNIT;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
            {
              HOST_WIDE_INT prev_offset
                = align_base (frame_offset,
                              MAX (alignb, ASAN_RED_ZONE_SIZE),
                              FRAME_GROWS_DOWNWARD);
              tree repr_decl = NULL_TREE;
              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));

              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        expand_one_stack_var_at (stack_vars[j].decl,
                                 base, base_align, offset);
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  Similarly for
         SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        {
          expand_one_hard_reg_var (var);
          if (!DECL_HARD_REGISTER (var))
            /* Invalid register specification.  */
            expand_one_error_var (var);
        }
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3,
  SPCT_FLAG_EXPLICIT = 4
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
            len = max;
          else
            len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG
      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
          && lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl))))
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  return size;
}
/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}
/* Check if the current function has local referenced variables that
   have their addresses taken, contain an array, or are arrays.  */

static bool
stack_protect_decl_p ()
{
  unsigned i;
  tree var;

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    if (!is_global_var (var))
      {
        tree var_type = TREE_TYPE (var);
        if (TREE_CODE (var) == VAR_DECL
            && (TREE_CODE (var_type) == ARRAY_TYPE
                || TREE_ADDRESSABLE (var)
                || (RECORD_OR_UNION_TYPE_P (var_type)
                    && record_or_union_type_has_array_p (var_type))))
          return true;
      }
  return false;
}
/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
         !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        /* This assumes that calls to internal-only functions never
           use a return slot.  */
        if (is_gimple_call (stmt)
            && !gimple_call_internal_p (stmt)
            && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
                                  gimple_call_fndecl (stmt)))
          return true;
      }
  return false;
}
/* Expand all variables used in the function.  */

static rtx_insn *
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  vec<tree> maybe_local_decls = vNULL;
  rtx_insn *var_end_seq = NULL;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
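
  /* For illustration (added example, not original code): with a 128-bit
     PREFERRED_STACK_BOUNDARY (align == 16) and a STARTING_FRAME_OFFSET of 8,
     off is 8 and frame_phase becomes 8, so frame_offset + frame_phase stays
     a multiple of 16, matching the invariant documented at frame_phase's
     declaration.  */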
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  if (targetm.use_pseudo_pic_reg ())
    pic_offset_table_rtx = gen_reg_rtx (Pmode);

  hash_map<tree, tree> ssa_name_decls;
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      /* Assign decls to each SSA name partition, share decls for partitions
         we could have coalesced (those with the same type).  */
      if (SSA_NAME_VAR (var) == NULL_TREE)
        {
          tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
          if (!*slot)
            *slot = create_tmp_reg (TREE_TYPE (var));
          replace_ssa_name_symbol (var, *slot);
        }

      /* Always allocate space for partitions based on VAR_DECLs.  But for
         those based on PARM_DECLs or RESULT_DECLs and which matter for the
         debug info, there is no need to do so if optimization is disabled
         because all the SSA_NAMEs based on these DECLs have been coalesced
         into a single partition, which is thus assigned the canonical RTL
         location of the DECLs.  If in_lto_p, we can't rely on optimize,
         a function could be compiled with -O1 -flto first and only the
         link performed at -O0.  */
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    gen_stack_protect_signal
      = stack_protect_decl_p () || stack_protect_return_slot_p ();

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* Expand variables not associated with any block now.  Those created by
         the optimizers could be live anywhere in the function.  Those that
         could possibly have been scoped originally and detached from their
         block will have their allocation deferred so we coalesce them with
         others when optimization is enabled.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            maybe_local_decls.safe_push (var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect != 0
          && (flag_stack_protect != SPCT_FLAG_EXPLICIT
              || (flag_stack_protect == SPCT_FLAG_EXPLICIT
                  && lookup_attribute ("stack_protect",
                                       DECL_ATTRIBUTES (current_function_decl)))))
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
          || cfun->calls_alloca || has_protected_decls
          || lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls
          || lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    case SPCT_FLAG_EXPLICIT:
      if (lookup_attribute ("stack_protect",
                            DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    default:
      break;
    }
  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      struct stack_vars_data data;

      data.asan_vec = vNULL;
      data.asan_decl_vec = vNULL;
      data.asan_base = NULL_RTX;
      data.asan_alignb = 0;

      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1, &data);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == SPCT_FLAG_ALL
              || flag_stack_protect == SPCT_FLAG_STRONG
              || (flag_stack_protect == SPCT_FLAG_EXPLICIT
                  && lookup_attribute ("stack_protect",
                                       DECL_ATTRIBUTES (current_function_decl))))
            expand_stack_vars (stack_protect_decl_phase_2, &data);
        }

      if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
        /* Phase 3, any partitions that need asan protection
           in addition to phase 1 and 2.  */
        expand_stack_vars (asan_decl_phase_3, &data);

      if (!data.asan_vec.is_empty ())
        {
          HOST_WIDE_INT prev_offset = frame_offset;
          HOST_WIDE_INT offset, sz, redzonesz;
          redzonesz = ASAN_RED_ZONE_SIZE;
          sz = data.asan_vec[0] - prev_offset;
          if (data.asan_alignb > ASAN_RED_ZONE_SIZE
              && data.asan_alignb <= 4096
              && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
            redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
                         & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
          offset
            = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
          data.asan_vec.safe_push (prev_offset);
          data.asan_vec.safe_push (offset);
          /* Leave space for alignment if STRICT_ALIGNMENT.  */
          if (STRICT_ALIGNMENT)
            alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
                                      << ASAN_SHADOW_SHIFT)
                                     / BITS_PER_UNIT, 1);

          var_end_seq
            = asan_emit_stack_protection (virtual_stack_vars_rtx,
                                          data.asan_base,
                                          data.asan_alignb,
                                          data.asan_vec.address (),
                                          data.asan_decl_vec.address (),
                                          data.asan_vec.length ());
        }

      expand_stack_vars (NULL, &data);

      data.asan_vec.release ();
      data.asan_decl_vec.release ();
    }

  fini_vars_expansion ();

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  maybe_local_decls.release ();

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }

  return var_end_seq;
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx_code_label *
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
  if (elt)
    return *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *lab_stmt;

      lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!lab_stmt)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return jump_target_rtx (lab);
    }

  rtx_code_label *l = gen_label_rtx ();
  lab_rtx_for_bb->put (bb, l);
  return l;
}
2079 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2080 of a basic block where we just expanded the conditional at the end,
2081 possibly clean up the CFG and instruction sequence. LAST is the
2082 last instruction before the just emitted jump sequence. */
static void
maybe_cleanup_end_of_block (edge e, rtx_insn *last)
{
2087 /* Special case: when jumpif decides that the condition is
2088 trivial it emits an unconditional jump (and the necessary
2089 barrier). But we still have two edges, the fallthru one is
2090 wrong. purge_dead_edges would clean this up later. Unfortunately
2091 we have to insert insns (and split edges) before
2092 find_many_sub_basic_blocks and hence before purge_dead_edges.
2093 But splitting edges might create new blocks which depend on the
2094 fact that if there are two edges there's no barrier. So the
2095 barrier would get lost and verify_flow_info would ICE. Instead
2096 of auditing all edge splitters to care for the barrier (which
2097 normally isn't there in a cleaned CFG), fix it here. */
2098 if (BARRIER_P (get_last_insn ()))
2102 /* Now, we have a single successor block, if we have insns to
2103 insert on the remaining edge we potentially will insert
2104 it at the end of this block (if the dest block isn't feasible)
2105 in order to avoid splitting the edge. This insertion will take
2106 place in front of the last jump. But we might have emitted
2107 multiple jumps (conditional and one unconditional) to the
2108 same destination. Inserting in front of the last one then
2109 is a problem. See PR 40021. We fix this by deleting all
2110 jumps except the last unconditional one. */
2111 insn
= PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
	 confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2115 for (insn
= PREV_INSN (insn
); insn
!= last
;)
2117 insn
= PREV_INSN (insn
);
2118 if (JUMP_P (NEXT_INSN (insn
)))
2120 if (!any_condjump_p (NEXT_INSN (insn
)))
2122 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn
))));
2123 delete_insn (NEXT_INSN (NEXT_INSN (insn
)));
2125 delete_insn (NEXT_INSN (insn
));
2131 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2132 Returns a new basic block if we've terminated the current basic
2133 block and created a new one. */
2136 expand_gimple_cond (basic_block bb
, gcond
*stmt
)
2138 basic_block new_bb
, dest
;
2142 rtx_insn
*last2
, *last
;
2143 enum tree_code code
;
2146 code
= gimple_cond_code (stmt
);
2147 op0
= gimple_cond_lhs (stmt
);
2148 op1
= gimple_cond_rhs (stmt
);
2149 /* We're sometimes presented with such code:
2153 This would expand to two comparisons which then later might
2154 be cleaned up by combine. But some pattern matchers like if-conversion
2155 work better when there's only one compare, so make up for this
2156 here as special exception if TER would have made the same change. */
2158 && TREE_CODE (op0
) == SSA_NAME
2159 && TREE_CODE (TREE_TYPE (op0
)) == BOOLEAN_TYPE
2160 && TREE_CODE (op1
) == INTEGER_CST
2161 && ((gimple_cond_code (stmt
) == NE_EXPR
2162 && integer_zerop (op1
))
2163 || (gimple_cond_code (stmt
) == EQ_EXPR
2164 && integer_onep (op1
)))
2165 && bitmap_bit_p (SA
.values
, SSA_NAME_VERSION (op0
)))
2167 gimple second
= SSA_NAME_DEF_STMT (op0
);
2168 if (gimple_code (second
) == GIMPLE_ASSIGN
)
2170 enum tree_code code2
= gimple_assign_rhs_code (second
);
2171 if (TREE_CODE_CLASS (code2
) == tcc_comparison
)
2174 op0
= gimple_assign_rhs1 (second
);
2175 op1
= gimple_assign_rhs2 (second
);
2177 /* If jumps are cheap and the target does not support conditional
2178 compare, turn some more codes into jumpy sequences. */
2179 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2180 && targetm
.gen_ccmp_first
== NULL
)
2182 if ((code2
== BIT_AND_EXPR
2183 && TYPE_PRECISION (TREE_TYPE (op0
)) == 1
2184 && TREE_CODE (gimple_assign_rhs2 (second
)) != INTEGER_CST
)
2185 || code2
== TRUTH_AND_EXPR
)
2187 code
= TRUTH_ANDIF_EXPR
;
2188 op0
= gimple_assign_rhs1 (second
);
2189 op1
= gimple_assign_rhs2 (second
);
2191 else if (code2
== BIT_IOR_EXPR
|| code2
== TRUTH_OR_EXPR
)
2193 code
= TRUTH_ORIF_EXPR
;
2194 op0
= gimple_assign_rhs1 (second
);
2195 op1
= gimple_assign_rhs2 (second
);
2201 last2
= last
= get_last_insn ();
2203 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2204 set_curr_insn_location (gimple_location (stmt
));
2206 /* These flags have no purpose in RTL land. */
2207 true_edge
->flags
&= ~EDGE_TRUE_VALUE
;
2208 false_edge
->flags
&= ~EDGE_FALSE_VALUE
;
2210 /* We can either have a pure conditional jump with one fallthru edge or
2211 two-way jump that needs to be decomposed into two basic blocks. */
2212 if (false_edge
->dest
== bb
->next_bb
)
2214 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2215 true_edge
->probability
);
2216 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2217 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2218 set_curr_insn_location (true_edge
->goto_locus
);
2219 false_edge
->flags
|= EDGE_FALLTHRU
;
2220 maybe_cleanup_end_of_block (false_edge
, last
);
2223 if (true_edge
->dest
== bb
->next_bb
)
2225 jumpifnot_1 (code
, op0
, op1
, label_rtx_for_bb (false_edge
->dest
),
2226 false_edge
->probability
);
2227 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2228 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2229 set_curr_insn_location (false_edge
->goto_locus
);
2230 true_edge
->flags
|= EDGE_FALLTHRU
;
2231 maybe_cleanup_end_of_block (true_edge
, last
);
2235 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2236 true_edge
->probability
);
2237 last
= get_last_insn ();
2238 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2239 set_curr_insn_location (false_edge
->goto_locus
);
2240 emit_jump (label_rtx_for_bb (false_edge
->dest
));
2243 if (BARRIER_P (BB_END (bb
)))
2244 BB_END (bb
) = PREV_INSN (BB_END (bb
));
2245 update_bb_for_insn (bb
);
2247 new_bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
2248 dest
= false_edge
->dest
;
2249 redirect_edge_succ (false_edge
, new_bb
);
2250 false_edge
->flags
|= EDGE_FALLTHRU
;
2251 new_bb
->count
= false_edge
->count
;
2252 new_bb
->frequency
= EDGE_FREQUENCY (false_edge
);
2253 add_bb_to_loop (new_bb
, bb
->loop_father
);
2254 new_edge
= make_edge (new_bb
, dest
, 0);
2255 new_edge
->probability
= REG_BR_PROB_BASE
;
2256 new_edge
->count
= new_bb
->count
;
2257 if (BARRIER_P (BB_END (new_bb
)))
2258 BB_END (new_bb
) = PREV_INSN (BB_END (new_bb
));
2259 update_bb_for_insn (new_bb
);
2261 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
2263 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2265 set_curr_insn_location (true_edge
->goto_locus
);
2266 true_edge
->goto_locus
= curr_insn_location ();
2272 /* Mark all calls that can have a transaction restart. */
2275 mark_transaction_restart_calls (gimple stmt
)
2277 struct tm_restart_node dummy
;
2278 tm_restart_node
**slot
;
2280 if (!cfun
->gimple_df
->tm_restart
)
2284 slot
= cfun
->gimple_df
->tm_restart
->find_slot (&dummy
, NO_INSERT
);
2287 struct tm_restart_node
*n
= *slot
;
2288 tree list
= n
->label_or_list
;
2291 for (insn
= next_real_insn (get_last_insn ());
2293 insn
= next_real_insn (insn
))
2296 if (TREE_CODE (list
) == LABEL_DECL
)
2297 add_reg_note (insn
, REG_TM
, label_rtx (list
));
2299 for (; list
; list
= TREE_CHAIN (list
))
2300 add_reg_note (insn
, REG_TM
, label_rtx (TREE_VALUE (list
)));
2304 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2308 expand_call_stmt (gcall
*stmt
)
2310 tree exp
, decl
, lhs
;
2314 if (gimple_call_internal_p (stmt
))
2316 expand_internal_call (stmt
);
2320 exp
= build_vl_exp (CALL_EXPR
, gimple_call_num_args (stmt
) + 3);
2322 CALL_EXPR_FN (exp
) = gimple_call_fn (stmt
);
2323 decl
= gimple_call_fndecl (stmt
);
2324 builtin_p
= decl
&& DECL_BUILT_IN (decl
);
2326 /* If this is not a builtin function, the function type through which the
2327 call is made may be different from the type of the function. */
2330 = fold_convert (build_pointer_type (gimple_call_fntype (stmt
)),
2331 CALL_EXPR_FN (exp
));
2333 TREE_TYPE (exp
) = gimple_call_return_type (stmt
);
2334 CALL_EXPR_STATIC_CHAIN (exp
) = gimple_call_chain (stmt
);
2336 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
2338 tree arg
= gimple_call_arg (stmt
, i
);
2340 /* TER addresses into arguments of builtin functions so we have a
2341 chance to infer more correct alignment information. See PR39954. */
2343 && TREE_CODE (arg
) == SSA_NAME
2344 && (def
= get_gimple_for_ssa_name (arg
))
2345 && gimple_assign_rhs_code (def
) == ADDR_EXPR
)
2346 arg
= gimple_assign_rhs1 (def
);
2347 CALL_EXPR_ARG (exp
, i
) = arg
;
2350 if (gimple_has_side_effects (stmt
))
2351 TREE_SIDE_EFFECTS (exp
) = 1;
2353 if (gimple_call_nothrow_p (stmt
))
2354 TREE_NOTHROW (exp
) = 1;
2356 CALL_EXPR_TAILCALL (exp
) = gimple_call_tail_p (stmt
);
2357 CALL_EXPR_RETURN_SLOT_OPT (exp
) = gimple_call_return_slot_opt_p (stmt
);
2359 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
2360 && (DECL_FUNCTION_CODE (decl
) == BUILT_IN_ALLOCA
2361 || DECL_FUNCTION_CODE (decl
) == BUILT_IN_ALLOCA_WITH_ALIGN
))
2362 CALL_ALLOCA_FOR_VAR_P (exp
) = gimple_call_alloca_for_var_p (stmt
);
2364 CALL_FROM_THUNK_P (exp
) = gimple_call_from_thunk_p (stmt
);
2365 CALL_EXPR_VA_ARG_PACK (exp
) = gimple_call_va_arg_pack_p (stmt
);
2366 SET_EXPR_LOCATION (exp
, gimple_location (stmt
));
2367 CALL_WITH_BOUNDS_P (exp
) = gimple_call_with_bounds_p (stmt
);
2369 /* Ensure RTL is created for debug args. */
2370 if (decl
&& DECL_HAS_DEBUG_ARGS_P (decl
))
2372 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (decl
);
2377 for (ix
= 1; (*debug_args
)->iterate (ix
, &dtemp
); ix
+= 2)
2379 gcc_assert (TREE_CODE (dtemp
) == DEBUG_EXPR_DECL
);
2380 expand_debug_expr (dtemp
);
2384 lhs
= gimple_call_lhs (stmt
);
2386 expand_assignment (lhs
, exp
, false);
2388 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2390 mark_transaction_restart_calls (stmt
);
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
				ggc_strdup (TREE_STRING_POINTER (string)),
				locus);

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}
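/* Illustrative only: a basic (argument-less) asm such as

     asm volatile ("nop");

   arrives here as a plain string and becomes a single ASM_INPUT rtx
   carrying the template text and the statement's location, marked
   volatile so later passes do not delete or move it.  */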
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (const vec<const char *> &constraints)
{
  unsigned len = constraints.length();
  if (len > 0)
    {
      int nalternatives = n_occurrences (',', constraints[0]);

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in %<asm%>");
	  return false;
	}

      for (unsigned i = 1; i < len; ++i)
	if (n_occurrences (',', constraints[i]) != nalternatives)
	  {
	    error ("operand constraints for %<asm%> differ "
		   "in number of alternatives");
	    return false;
	  }
    }
  return true;
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit an error and return true for a
   conflict, false for ok.  */

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qE conflicts with asm clobber list",
	     DECL_NAME (overlap));

      /* Reset registerness to stop multiple errors emitted for a single
	 variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}
2479 /* Generate RTL for an asm statement with arguments.
2480 STRING is the instruction template.
2481 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2482 Each output or input has an expression in the TREE_VALUE and
2483 a tree list in TREE_PURPOSE which in turn contains a constraint
2484 name in TREE_VALUE (or NULL_TREE) and a constraint string
2486 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2487 that is clobbered by this insn.
2489 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2490 should be the fallthru basic block of the asm goto.
2492 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2493 Some elements of OUTPUTS may be replaced with trees representing temporary
2494 values. The caller should copy those temporary values to the originally
2497 VOL nonzero means the insn is volatile; don't optimize it. */
2500 expand_asm_stmt (gasm
*stmt
)
2502 class save_input_location
2507 explicit save_input_location(location_t where
)
2509 old
= input_location
;
2510 input_location
= where
;
2513 ~save_input_location()
2515 input_location
= old
;
2519 location_t locus
= gimple_location (stmt
);
2521 if (gimple_asm_input_p (stmt
))
2523 const char *s
= gimple_asm_string (stmt
);
2524 tree string
= build_string (strlen (s
), s
);
2525 expand_asm_loc (string
, gimple_asm_volatile_p (stmt
), locus
);
  /* There are some legacy diagnostics in here; this also avoids a
     sixth parameter to targetm.md_asm_adjust.  */
2531 save_input_location
s_i_l(locus
);
2533 unsigned noutputs
= gimple_asm_noutputs (stmt
);
2534 unsigned ninputs
= gimple_asm_ninputs (stmt
);
2535 unsigned nlabels
= gimple_asm_nlabels (stmt
);
2538 /* ??? Diagnose during gimplification? */
2539 if (ninputs
+ noutputs
+ nlabels
> MAX_RECOG_OPERANDS
)
2541 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS
);
2545 auto_vec
<tree
, MAX_RECOG_OPERANDS
> output_tvec
;
2546 auto_vec
<tree
, MAX_RECOG_OPERANDS
> input_tvec
;
2547 auto_vec
<const char *, MAX_RECOG_OPERANDS
> constraints
;
2549 /* Copy the gimple vectors into new vectors that we can manipulate. */
2551 output_tvec
.safe_grow (noutputs
);
2552 input_tvec
.safe_grow (ninputs
);
2553 constraints
.safe_grow (noutputs
+ ninputs
);
2555 for (i
= 0; i
< noutputs
; ++i
)
2557 tree t
= gimple_asm_output_op (stmt
, i
);
2558 output_tvec
[i
] = TREE_VALUE (t
);
2559 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
2561 for (i
= 0; i
< ninputs
; i
++)
2563 tree t
= gimple_asm_input_op (stmt
, i
);
2564 input_tvec
[i
] = TREE_VALUE (t
);
2565 constraints
[i
+ noutputs
]
2566 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
2569 /* ??? Diagnose during gimplification? */
2570 if (! check_operand_nalternatives (constraints
))
2573 /* Count the number of meaningful clobbered registers, ignoring what
2574 we would ignore later. */
2575 auto_vec
<rtx
> clobber_rvec
;
2576 HARD_REG_SET clobbered_regs
;
2577 CLEAR_HARD_REG_SET (clobbered_regs
);
2579 if (unsigned n
= gimple_asm_nclobbers (stmt
))
2581 clobber_rvec
.reserve (n
);
2582 for (i
= 0; i
< n
; i
++)
2584 tree t
= gimple_asm_clobber_op (stmt
, i
);
2585 const char *regname
= TREE_STRING_POINTER (TREE_VALUE (t
));
2588 j
= decode_reg_name_and_count (regname
, &nregs
);
2593 /* ??? Diagnose during gimplification? */
2594 error ("unknown register name %qs in %<asm%>", regname
);
2598 rtx x
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
2599 clobber_rvec
.safe_push (x
);
2603 /* Otherwise we should have -1 == empty string
2604 or -3 == cc, which is not a register. */
2605 gcc_assert (j
== -1 || j
== -3);
2609 for (int reg
= j
; reg
< j
+ nregs
; reg
++)
2611 /* Clobbering the PIC register is an error. */
2612 if (reg
== (int) PIC_OFFSET_TABLE_REGNUM
)
2614 /* ??? Diagnose during gimplification? */
2615 error ("PIC register clobbered by %qs in %<asm%>",
2620 SET_HARD_REG_BIT (clobbered_regs
, reg
);
2621 rtx x
= gen_rtx_REG (reg_raw_mode
[reg
], reg
);
2622 clobber_rvec
.safe_push (x
);
2626 unsigned nclobbers
= clobber_rvec
.length();
2628 /* First pass over inputs and outputs checks validity and sets
2629 mark_addressable if needed. */
2630 /* ??? Diagnose during gimplification? */
2632 for (i
= 0; i
< noutputs
; ++i
)
2634 tree val
= output_tvec
[i
];
2635 tree type
= TREE_TYPE (val
);
2636 const char *constraint
;
2641 /* Try to parse the output constraint. If that fails, there's
2642 no point in going further. */
2643 constraint
= constraints
[i
];
2644 if (!parse_output_constraint (&constraint
, i
, ninputs
, noutputs
,
2645 &allows_mem
, &allows_reg
, &is_inout
))
2652 && REG_P (DECL_RTL (val
))
2653 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
))))
2654 mark_addressable (val
);
2657 for (i
= 0; i
< ninputs
; ++i
)
2659 bool allows_reg
, allows_mem
;
2660 const char *constraint
;
2662 constraint
= constraints
[i
+ noutputs
];
2663 if (! parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
2664 constraints
.address (),
2665 &allows_mem
, &allows_reg
))
2668 if (! allows_reg
&& allows_mem
)
2669 mark_addressable (input_tvec
[i
]);
2672 /* Second pass evaluates arguments. */
2674 /* Make sure stack is consistent for asm goto. */
2676 do_pending_stack_adjust ();
2677 int old_generating_concat_p
= generating_concat_p
;
2679 /* Vector of RTX's of evaluated output operands. */
2680 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> output_rvec
;
2681 auto_vec
<int, MAX_RECOG_OPERANDS
> inout_opnum
;
2682 rtx_insn
*after_rtl_seq
= NULL
, *after_rtl_end
= NULL
;
2684 output_rvec
.safe_grow (noutputs
);
2686 for (i
= 0; i
< noutputs
; ++i
)
2688 tree val
= output_tvec
[i
];
2689 tree type
= TREE_TYPE (val
);
2690 bool is_inout
, allows_reg
, allows_mem
, ok
;
2693 ok
= parse_output_constraint (&constraints
[i
], i
, ninputs
,
2694 noutputs
, &allows_mem
, &allows_reg
,
2698 /* If an output operand is not a decl or indirect ref and our constraint
2699 allows a register, make a temporary to act as an intermediate.
2700 Make the asm insn write into that, then we will copy it to
2701 the real output operand. Likewise for promoted variables. */
2703 generating_concat_p
= 0;
2705 if ((TREE_CODE (val
) == INDIRECT_REF
2708 && (allows_mem
|| REG_P (DECL_RTL (val
)))
2709 && ! (REG_P (DECL_RTL (val
))
2710 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
)))
2714 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
2715 !allows_reg
? EXPAND_MEMORY
: EXPAND_WRITE
);
2717 op
= validize_mem (op
);
2719 if (! allows_reg
&& !MEM_P (op
))
2720 error ("output number %d not directly addressable", i
);
2721 if ((! allows_mem
&& MEM_P (op
))
2722 || GET_CODE (op
) == CONCAT
)
2725 op
= gen_reg_rtx (GET_MODE (op
));
2727 generating_concat_p
= old_generating_concat_p
;
2730 emit_move_insn (op
, old_op
);
2732 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
2733 emit_move_insn (old_op
, op
);
2734 after_rtl_seq
= get_insns ();
2735 after_rtl_end
= get_last_insn ();
2741 op
= assign_temp (type
, 0, 1);
2742 op
= validize_mem (op
);
2743 if (!MEM_P (op
) && TREE_CODE (val
) == SSA_NAME
)
2744 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val
), op
);
2746 generating_concat_p
= old_generating_concat_p
;
2748 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
2749 expand_assignment (val
, make_tree (type
, op
), false);
2750 after_rtl_seq
= get_insns ();
2751 after_rtl_end
= get_last_insn ();
2754 output_rvec
[i
] = op
;
2757 inout_opnum
.safe_push (i
);
2760 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> input_rvec
;
2761 auto_vec
<machine_mode
, MAX_RECOG_OPERANDS
> input_mode
;
2763 input_rvec
.safe_grow (ninputs
);
2764 input_mode
.safe_grow (ninputs
);
2766 generating_concat_p
= 0;
2768 for (i
= 0; i
< ninputs
; ++i
)
2770 tree val
= input_tvec
[i
];
2771 tree type
= TREE_TYPE (val
);
2772 bool allows_reg
, allows_mem
, ok
;
2773 const char *constraint
;
2776 constraint
= constraints
[i
+ noutputs
];
2777 ok
= parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
2778 constraints
.address (),
2779 &allows_mem
, &allows_reg
);
2782 /* EXPAND_INITIALIZER will not generate code for valid initializer
2783 constants, but will still generate code for other types of operand.
2784 This is the behavior we want for constant constraints. */
2785 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
2786 allows_reg
? EXPAND_NORMAL
2787 : allows_mem
? EXPAND_MEMORY
2788 : EXPAND_INITIALIZER
);
2790 /* Never pass a CONCAT to an ASM. */
2791 if (GET_CODE (op
) == CONCAT
)
2792 op
= force_reg (GET_MODE (op
), op
);
2793 else if (MEM_P (op
))
2794 op
= validize_mem (op
);
2796 if (asm_operand_ok (op
, constraint
, NULL
) <= 0)
2798 if (allows_reg
&& TYPE_MODE (type
) != BLKmode
)
2799 op
= force_reg (TYPE_MODE (type
), op
);
2800 else if (!allows_mem
)
2801 warning (0, "asm operand %d probably doesn%'t match constraints",
2803 else if (MEM_P (op
))
2805 /* We won't recognize either volatile memory or memory
2806 with a queued address as available a memory_operand
2807 at this point. Ignore it: clearly this *is* a memory. */
2813 input_mode
[i
] = TYPE_MODE (type
);
2816 /* For in-out operands, copy output rtx to input rtx. */
2817 unsigned ninout
= inout_opnum
.length();
2818 for (i
= 0; i
< ninout
; i
++)
2820 int j
= inout_opnum
[i
];
2821 rtx o
= output_rvec
[j
];
2823 input_rvec
.safe_push (o
);
2824 input_mode
.safe_push (GET_MODE (o
));
      char buffer[16];
      sprintf (buffer, "%d", j);
      constraints.safe_push (ggc_strdup (buffer));
    }
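  /* Illustrative note: this is how a "+r" in-out operand ends up being
     represented for the insn.  It was already recorded as output J with
     an "=" constraint, and here it is appended again as an extra input
     whose constraint is the digit string of the matching output,
     e.g. "0" for the first output.  */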
2832 /* Sometimes we wish to automatically clobber registers across an asm.
2833 Case in point is when the i386 backend moved from cc0 to a hard reg --
2834 maintaining source-level compatibility means automatically clobbering
2835 the flags register. */
2836 rtx_insn
*after_md_seq
= NULL
;
2837 if (targetm
.md_asm_adjust
)
2838 after_md_seq
= targetm
.md_asm_adjust (output_rvec
, input_rvec
,
2839 constraints
, clobber_rvec
,
2842 /* Do not allow the hook to change the output and input count,
2843 lest it mess up the operand numbering. */
2844 gcc_assert (output_rvec
.length() == noutputs
);
2845 gcc_assert (input_rvec
.length() == ninputs
);
2846 gcc_assert (constraints
.length() == noutputs
+ ninputs
);
2848 /* But it certainly can adjust the clobbers. */
2849 nclobbers
= clobber_rvec
.length();
2851 /* Third pass checks for easy conflicts. */
2852 /* ??? Why are we doing this on trees instead of rtx. */
2854 bool clobber_conflict_found
= 0;
2855 for (i
= 0; i
< noutputs
; ++i
)
2856 if (tree_conflicts_with_clobbers_p (output_tvec
[i
], &clobbered_regs
))
2857 clobber_conflict_found
= 1;
2858 for (i
= 0; i
< ninputs
- ninout
; ++i
)
2859 if (tree_conflicts_with_clobbers_p (input_tvec
[i
], &clobbered_regs
))
2860 clobber_conflict_found
= 1;
2862 /* Make vectors for the expression-rtx, constraint strings,
2863 and named operands. */
2865 rtvec argvec
= rtvec_alloc (ninputs
);
2866 rtvec constraintvec
= rtvec_alloc (ninputs
);
2867 rtvec labelvec
= rtvec_alloc (nlabels
);
2869 rtx body
= gen_rtx_ASM_OPERANDS ((noutputs
== 0 ? VOIDmode
2870 : GET_MODE (output_rvec
[0])),
2871 ggc_strdup (gimple_asm_string (stmt
)),
2872 empty_string
, 0, argvec
, constraintvec
,
2874 MEM_VOLATILE_P (body
) = gimple_asm_volatile_p (stmt
);
2876 for (i
= 0; i
< ninputs
; ++i
)
2878 ASM_OPERANDS_INPUT (body
, i
) = input_rvec
[i
];
2879 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, i
)
2880 = gen_rtx_ASM_INPUT_loc (input_mode
[i
],
2881 constraints
[i
+ noutputs
],
2885 /* Copy labels to the vector. */
2886 rtx_code_label
*fallthru_label
= NULL
;
2889 basic_block fallthru_bb
= NULL
;
2890 edge fallthru
= find_fallthru_edge (gimple_bb (stmt
)->succs
);
2892 fallthru_bb
= fallthru
->dest
;
2894 for (i
= 0; i
< nlabels
; ++i
)
2896 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
2898 /* If asm goto has any labels in the fallthru basic block, use
2899 a label that we emit immediately after the asm goto. Expansion
2900 may insert further instructions into the same basic block after
2901 asm goto and if we don't do this, insertion of instructions on
2902 the fallthru edge might misbehave. See PR58670. */
2903 if (fallthru_bb
&& label_to_block_fn (cfun
, label
) == fallthru_bb
)
2905 if (fallthru_label
== NULL_RTX
)
2906 fallthru_label
= gen_label_rtx ();
2910 r
= label_rtx (label
);
2911 ASM_OPERANDS_LABEL (body
, i
) = gen_rtx_LABEL_REF (Pmode
, r
);
2915 /* Now, for each output, construct an rtx
2916 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2917 ARGVEC CONSTRAINTS OPNAMES))
2918 If there is more than one, put them inside a PARALLEL. */
2920 if (nlabels
> 0 && nclobbers
== 0)
2922 gcc_assert (noutputs
== 0);
2923 emit_jump_insn (body
);
2925 else if (noutputs
== 0 && nclobbers
== 0)
2927 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2930 else if (noutputs
== 1 && nclobbers
== 0)
2932 ASM_OPERANDS_OUTPUT_CONSTRAINT (body
) = constraints
[0];
2933 emit_insn (gen_rtx_SET (output_rvec
[0], body
));
2943 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num
+ nclobbers
));
2945 /* For each output operand, store a SET. */
2946 for (i
= 0; i
< noutputs
; ++i
)
2948 rtx src
, o
= output_rvec
[i
];
2951 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody
) = constraints
[0];
2956 src
= gen_rtx_ASM_OPERANDS (GET_MODE (o
),
2957 ASM_OPERANDS_TEMPLATE (obody
),
2958 constraints
[i
], i
, argvec
,
2959 constraintvec
, labelvec
, locus
);
2960 MEM_VOLATILE_P (src
) = gimple_asm_volatile_p (stmt
);
2962 XVECEXP (body
, 0, i
) = gen_rtx_SET (o
, src
);
2965 /* If there are no outputs (but there are some clobbers)
2966 store the bare ASM_OPERANDS into the PARALLEL. */
2968 XVECEXP (body
, 0, i
++) = obody
;
2970 /* Store (clobber REG) for each clobbered register specified. */
2971 for (unsigned j
= 0; j
< nclobbers
; ++j
)
2973 rtx clobbered_reg
= clobber_rvec
[j
];
2975 /* Do sanity check for overlap between clobbers and respectively
2976 input and outputs that hasn't been handled. Such overlap
2977 should have been detected and reported above. */
2978 if (!clobber_conflict_found
&& REG_P (clobbered_reg
))
2980 /* We test the old body (obody) contents to avoid
2981 tripping over the under-construction body. */
2982 for (unsigned k
= 0; k
< noutputs
; ++k
)
2983 if (reg_overlap_mentioned_p (clobbered_reg
, output_rvec
[k
]))
2984 internal_error ("asm clobber conflict with output operand");
2986 for (unsigned k
= 0; k
< ninputs
- ninout
; ++k
)
2987 if (reg_overlap_mentioned_p (clobbered_reg
, input_rvec
[k
]))
2988 internal_error ("asm clobber conflict with input operand");
2991 XVECEXP (body
, 0, i
++) = gen_rtx_CLOBBER (VOIDmode
, clobbered_reg
);
2995 emit_jump_insn (body
);
3000 generating_concat_p
= old_generating_concat_p
;
3003 emit_label (fallthru_label
);
3006 emit_insn (after_md_seq
);
3008 emit_insn (after_rtl_seq
);
3011 crtl
->has_asm_statement
= 1;
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

static void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
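/* Illustrative only: a computed goto such as

     void *tgt = &&out;
     goto *tgt;
   out:;

   reaches expansion as a GIMPLE_GOTO whose destination is not a
   LABEL_DECL, so the address is evaluated to an rtx and emitted as an
   indirect jump.  */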
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

static void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  emit_jump (jump_target_rtx (label));
}
/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}
/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location unless it's already there.  */

  tree decl = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl);
  if (return_reg != val)
    {
      tree funtype = TREE_TYPE (current_function_decl);
      tree type = TREE_TYPE (decl);
      int unsignedp = TYPE_UNSIGNED (type);
      machine_mode old_mode = DECL_MODE (decl);
      machine_mode mode;
      if (DECL_BY_REFERENCE (decl))
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
      else
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);

      if (mode != old_mode)
	val = convert_modes (mode, old_mode, val, unsignedp);

      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
3100 /* Generate RTL to evaluate the expression RETVAL and return it
3101 from the current function. */
3104 expand_return (tree retval
, tree bounds
)
3111 /* If function wants no value, give it none. */
3112 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl
))) == VOID_TYPE
)
3114 expand_normal (retval
);
3115 expand_null_return ();
3119 if (retval
== error_mark_node
)
3121 /* Treat this like a return of no value from a function that
3123 expand_null_return ();
3126 else if ((TREE_CODE (retval
) == MODIFY_EXPR
3127 || TREE_CODE (retval
) == INIT_EXPR
)
3128 && TREE_CODE (TREE_OPERAND (retval
, 0)) == RESULT_DECL
)
3129 retval_rhs
= TREE_OPERAND (retval
, 1);
3131 retval_rhs
= retval
;
3133 result_rtl
= DECL_RTL (DECL_RESULT (current_function_decl
));
3135 /* Put returned bounds to the right place. */
3136 bounds_rtl
= DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl
));
3143 bnd
= expand_normal (bounds
);
3144 targetm
.calls
.store_returned_bounds (bounds_rtl
, bnd
);
3146 else if (REG_P (bounds_rtl
))
3148 addr
= expand_normal (build_fold_addr_expr (retval_rhs
));
3149 addr
= gen_rtx_MEM (Pmode
, addr
);
3150 bnd
= targetm
.calls
.load_bounds_for_arg (addr
, NULL
, NULL
);
3151 targetm
.calls
.store_returned_bounds (bounds_rtl
, bnd
);
3157 gcc_assert (GET_CODE (bounds_rtl
) == PARALLEL
);
3159 addr
= expand_normal (build_fold_addr_expr (retval_rhs
));
3160 addr
= gen_rtx_MEM (Pmode
, addr
);
3162 for (n
= 0; n
< XVECLEN (bounds_rtl
, 0); n
++)
3164 rtx offs
= XEXP (XVECEXP (bounds_rtl
, 0, n
), 1);
3165 rtx slot
= XEXP (XVECEXP (bounds_rtl
, 0, n
), 0);
3166 rtx from
= adjust_address (addr
, Pmode
, INTVAL (offs
));
3167 rtx bnd
= targetm
.calls
.load_bounds_for_arg (from
, NULL
, NULL
);
3168 targetm
.calls
.store_returned_bounds (slot
, bnd
);
3172 else if (chkp_function_instrumented_p (current_function_decl
)
3173 && !BOUNDED_P (retval_rhs
)
3174 && chkp_type_has_pointer (TREE_TYPE (retval_rhs
))
3175 && TREE_CODE (retval_rhs
) != RESULT_DECL
)
3177 rtx addr
= expand_normal (build_fold_addr_expr (retval_rhs
));
3178 addr
= gen_rtx_MEM (Pmode
, addr
);
3180 gcc_assert (MEM_P (result_rtl
));
3182 chkp_copy_bounds_for_stack_parm (result_rtl
, addr
, TREE_TYPE (retval_rhs
));
3185 /* If we are returning the RESULT_DECL, then the value has already
3186 been stored into it, so we don't have to do anything special. */
3187 if (TREE_CODE (retval_rhs
) == RESULT_DECL
)
3188 expand_value_return (result_rtl
);
3190 /* If the result is an aggregate that is being returned in one (or more)
3191 registers, load the registers here. */
3193 else if (retval_rhs
!= 0
3194 && TYPE_MODE (TREE_TYPE (retval_rhs
)) == BLKmode
3195 && REG_P (result_rtl
))
3197 val
= copy_blkmode_to_reg (GET_MODE (result_rtl
), retval_rhs
);
3200 /* Use the mode of the result value on the return register. */
3201 PUT_MODE (result_rtl
, GET_MODE (val
));
3202 expand_value_return (val
);
3205 expand_null_return ();
3207 else if (retval_rhs
!= 0
3208 && !VOID_TYPE_P (TREE_TYPE (retval_rhs
))
3209 && (REG_P (result_rtl
)
3210 || (GET_CODE (result_rtl
) == PARALLEL
)))
3212 /* Compute the return value into a temporary (usually a pseudo reg). */
3214 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl
)), 0, 1);
3215 val
= expand_expr (retval_rhs
, val
, GET_MODE (val
), EXPAND_NORMAL
);
3216 val
= force_not_mem (val
);
3217 expand_value_return (val
);
3221 /* No hard reg used; calculate value into hard return reg. */
3222 expand_expr (retval
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3223 expand_value_return (result_rtl
);
3227 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3228 STMT that doesn't require special handling for outgoing edges. That
3229 is no tailcalls and no GIMPLE_COND. */
3232 expand_gimple_stmt_1 (gimple stmt
)
3236 set_curr_insn_location (gimple_location (stmt
));
3238 switch (gimple_code (stmt
))
3241 op0
= gimple_goto_dest (stmt
);
3242 if (TREE_CODE (op0
) == LABEL_DECL
)
3245 expand_computed_goto (op0
);
3248 expand_label (gimple_label_label (as_a
<glabel
*> (stmt
)));
3251 case GIMPLE_PREDICT
:
3254 expand_case (as_a
<gswitch
*> (stmt
));
3257 expand_asm_stmt (as_a
<gasm
*> (stmt
));
3260 expand_call_stmt (as_a
<gcall
*> (stmt
));
3264 op0
= gimple_return_retval (as_a
<greturn
*> (stmt
));
3266 if (op0
&& op0
!= error_mark_node
)
3268 tree result
= DECL_RESULT (current_function_decl
);
3270 /* If we are not returning the current function's RESULT_DECL,
3271 build an assignment to it. */
3274 /* I believe that a function's RESULT_DECL is unique. */
3275 gcc_assert (TREE_CODE (op0
) != RESULT_DECL
);
3277 /* ??? We'd like to use simply expand_assignment here,
3278 but this fails if the value is of BLKmode but the return
3279 decl is a register. expand_return has special handling
3280 for this combination, which eventually should move
3281 to common code. See comments there. Until then, let's
3282 build a modify expression :-/ */
3283 op0
= build2 (MODIFY_EXPR
, TREE_TYPE (result
),
3288 expand_null_return ();
3290 expand_return (op0
, gimple_return_retbnd (stmt
));
3295 gassign
*assign_stmt
= as_a
<gassign
*> (stmt
);
3296 tree lhs
= gimple_assign_lhs (assign_stmt
);
3298 /* Tree expand used to fiddle with |= and &= of two bitfield
3299 COMPONENT_REFs here. This can't happen with gimple, the LHS
3300 of binary assigns must be a gimple reg. */
3302 if (TREE_CODE (lhs
) != SSA_NAME
3303 || get_gimple_rhs_class (gimple_expr_code (stmt
))
3304 == GIMPLE_SINGLE_RHS
)
3306 tree rhs
= gimple_assign_rhs1 (assign_stmt
);
3307 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt
))
3308 == GIMPLE_SINGLE_RHS
);
3309 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (rhs
))
3310 SET_EXPR_LOCATION (rhs
, gimple_location (stmt
));
3311 if (TREE_CLOBBER_P (rhs
))
3312 /* This is a clobber to mark the going out of scope for
3316 expand_assignment (lhs
, rhs
,
3317 gimple_assign_nontemporal_move_p (
3323 bool nontemporal
= gimple_assign_nontemporal_move_p (assign_stmt
);
3324 struct separate_ops ops
;
3325 bool promoted
= false;
3327 target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3328 if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3331 ops
.code
= gimple_assign_rhs_code (assign_stmt
);
3332 ops
.type
= TREE_TYPE (lhs
);
3333 switch (get_gimple_rhs_class (ops
.code
))
3335 case GIMPLE_TERNARY_RHS
:
3336 ops
.op2
= gimple_assign_rhs3 (assign_stmt
);
3338 case GIMPLE_BINARY_RHS
:
3339 ops
.op1
= gimple_assign_rhs2 (assign_stmt
);
3341 case GIMPLE_UNARY_RHS
:
3342 ops
.op0
= gimple_assign_rhs1 (assign_stmt
);
3347 ops
.location
= gimple_location (stmt
);
3349 /* If we want to use a nontemporal store, force the value to
3350 register first. If we store into a promoted register,
3351 don't directly expand to target. */
3352 temp
= nontemporal
|| promoted
? NULL_RTX
: target
;
3353 temp
= expand_expr_real_2 (&ops
, temp
, GET_MODE (target
),
3360 int unsignedp
= SUBREG_PROMOTED_SIGN (target
);
3361 /* If TEMP is a VOIDmode constant, use convert_modes to make
3362 sure that we properly convert it. */
3363 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3365 temp
= convert_modes (GET_MODE (target
),
3366 TYPE_MODE (ops
.type
),
3368 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3369 GET_MODE (target
), temp
, unsignedp
);
3372 convert_move (SUBREG_REG (target
), temp
, unsignedp
);
3374 else if (nontemporal
&& emit_storent_insn (target
, temp
))
3378 temp
= force_operand (temp
, target
);
3380 emit_move_insn (target
, temp
);
3391 /* Expand one gimple statement STMT and return the last RTL instruction
3392 before any of the newly generated ones.
3394 In addition to generating the necessary RTL instructions this also
3395 sets REG_EH_REGION notes if necessary and sets the current source
3396 location for diagnostics. */
3399 expand_gimple_stmt (gimple stmt
)
3401 location_t saved_location
= input_location
;
3402 rtx_insn
*last
= get_last_insn ();
3407 /* We need to save and restore the current source location so that errors
3408 discovered during expansion are emitted with the right location. But
3409 it would be better if the diagnostic routines used the source location
3410 embedded in the tree nodes rather than globals. */
3411 if (gimple_has_location (stmt
))
3412 input_location
= gimple_location (stmt
);
3414 expand_gimple_stmt_1 (stmt
);
3416 /* Free any temporaries used to evaluate this statement. */
3419 input_location
= saved_location
;
3421 /* Mark all insns that may trap. */
3422 lp_nr
= lookup_stmt_eh_lp (stmt
);
3426 for (insn
= next_real_insn (last
); insn
;
3427 insn
= next_real_insn (insn
))
3429 if (! find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
)
3430 /* If we want exceptions for non-call insns, any
3431 may_trap_p instruction may throw. */
3432 && GET_CODE (PATTERN (insn
)) != CLOBBER
3433 && GET_CODE (PATTERN (insn
)) != USE
3434 && insn_could_throw_p (insn
))
3435 make_reg_eh_region_note (insn
, 0, lp_nr
);
3442 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3443 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3444 generated a tail call (something that might be denied by the ABI
3445 rules governing the call; see calls.c).
3447 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3448 can still reach the rest of BB. The case here is __builtin_sqrt,
3449 where the NaN result goes through the external function (with a
3450 tailcall) and the normal result happens via a sqrt instruction. */
3453 expand_gimple_tailcall (basic_block bb
, gcall
*stmt
, bool *can_fallthru
)
3455 rtx_insn
*last2
, *last
;
3461 last2
= last
= expand_gimple_stmt (stmt
);
3463 for (last
= NEXT_INSN (last
); last
; last
= NEXT_INSN (last
))
3464 if (CALL_P (last
) && SIBLING_CALL_P (last
))
3467 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
3469 *can_fallthru
= true;
3473 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3474 Any instructions emitted here are about to be deleted. */
3475 do_pending_stack_adjust ();
3477 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3478 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3479 EH or abnormal edges, we shouldn't have created a tail call in
3480 the first place. So it seems to me we should just be removing
3481 all edges here, or redirecting the existing fallthru edge to
3487 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3489 if (!(e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)))
3491 if (e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
3493 e
->dest
->count
-= e
->count
;
3494 e
->dest
->frequency
-= EDGE_FREQUENCY (e
);
3495 if (e
->dest
->count
< 0)
3497 if (e
->dest
->frequency
< 0)
3498 e
->dest
->frequency
= 0;
3501 probability
+= e
->probability
;
3508 /* This is somewhat ugly: the call_expr expander often emits instructions
3509 after the sibcall (to perform the function return). These confuse the
3510 find_many_sub_basic_blocks code, so we need to get rid of these. */
3511 last
= NEXT_INSN (last
);
3512 gcc_assert (BARRIER_P (last
));
3514 *can_fallthru
= false;
3515 while (NEXT_INSN (last
))
3517 /* For instance an sqrt builtin expander expands if with
3518 sibcall in the then and label for `else`. */
3519 if (LABEL_P (NEXT_INSN (last
)))
3521 *can_fallthru
= true;
3524 delete_insn (NEXT_INSN (last
));
3527 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_ABNORMAL
3529 e
->probability
+= probability
;
3532 update_bb_for_insn (bb
);
3534 if (NEXT_INSN (last
))
3536 bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
3539 if (BARRIER_P (last
))
3540 BB_END (bb
) = PREV_INSN (last
);
3543 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      constm1_rtx, const0_rtx),
/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, const0_rtx),
/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
	? (op1 / mod > 0 ? 1 : -1)
	: 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
		       gen_rtx_MINUS (mode,
				      gen_rtx_ABS (mode, op1),
				      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, constm1_rtx),
/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
		       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
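/* Worked example (illustrative only): for -7 / 2 the truncated quotient
   is -3 with remainder mod == -1.  floor_sdiv_adjust returns -1 because
   mod != 0 and op1 / mod == 2 / -1 < 0, giving the floor result
   -3 + -1 == -4.  For 7u / 2u, mod == 1, so ceil_udiv_adjust returns 1
   and the ceiling result is 3 + 1 == 4.  */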
3627 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
3631 convert_debug_memory_address (machine_mode mode
, rtx x
,
3634 machine_mode xmode
= GET_MODE (x
);
3636 #ifndef POINTERS_EXTEND_UNSIGNED
3637 gcc_assert (mode
== Pmode
3638 || mode
== targetm
.addr_space
.address_mode (as
));
3639 gcc_assert (xmode
== mode
|| xmode
== VOIDmode
);
3643 gcc_assert (targetm
.addr_space
.valid_pointer_mode (mode
, as
));
3645 if (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
)
3648 if (GET_MODE_PRECISION (mode
) < GET_MODE_PRECISION (xmode
))
3649 x
= simplify_gen_subreg (mode
, x
, xmode
,
3650 subreg_lowpart_offset
3652 else if (POINTERS_EXTEND_UNSIGNED
> 0)
3653 x
= gen_rtx_ZERO_EXTEND (mode
, x
);
3654 else if (!POINTERS_EXTEND_UNSIGNED
)
3655 x
= gen_rtx_SIGN_EXTEND (mode
, x
);
3658 switch (GET_CODE (x
))
3661 if ((SUBREG_PROMOTED_VAR_P (x
)
3662 || (REG_P (SUBREG_REG (x
)) && REG_POINTER (SUBREG_REG (x
)))
3663 || (GET_CODE (SUBREG_REG (x
)) == PLUS
3664 && REG_P (XEXP (SUBREG_REG (x
), 0))
3665 && REG_POINTER (XEXP (SUBREG_REG (x
), 0))
3666 && CONST_INT_P (XEXP (SUBREG_REG (x
), 1))))
3667 && GET_MODE (SUBREG_REG (x
)) == mode
)
3668 return SUBREG_REG (x
);
3671 temp
= gen_rtx_LABEL_REF (mode
, LABEL_REF_LABEL (x
));
3672 LABEL_REF_NONLOCAL_P (temp
) = LABEL_REF_NONLOCAL_P (x
);
3675 temp
= shallow_copy_rtx (x
);
3676 PUT_MODE (temp
, mode
);
3679 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
3681 temp
= gen_rtx_CONST (mode
, temp
);
3685 if (CONST_INT_P (XEXP (x
, 1)))
3687 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
3689 return gen_rtx_fmt_ee (GET_CODE (x
), mode
, temp
, XEXP (x
, 1));
3695 /* Don't know how to express ptr_extend as operation in debug info. */
3698 #endif /* POINTERS_EXTEND_UNSIGNED */
/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
   by avoid_deep_ter_for_debug.  */

static hash_map<tree, tree> *deep_ter_debug_map;
/* Split too deep TER chains for debug stmts using debug temporaries.  */

static void
avoid_deep_ter_for_debug (gimple stmt, int depth)
{
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
	continue;
      gimple g = get_gimple_for_ssa_name (use);
      if (g == NULL)
	continue;
      if (depth > 6 && !stmt_ends_bb_p (g))
	{
	  if (deep_ter_debug_map == NULL)
	    deep_ter_debug_map = new hash_map<tree, tree>;

	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
	  if (vexpr != NULL)
	    continue;
	  vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple def_temp = gimple_build_debug_bind (vexpr, use, g);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (use);
	  DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
	  avoid_deep_ter_for_debug (def_temp, 0);
	}
      else
	avoid_deep_ter_for_debug (g, depth + 1);
    }
}
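/* Sketch of the effect (illustrative only): in a long single-use chain

     a_1 = b_2 + c_3;
     d_4 = a_1 * e_5;
     ...

   once the nesting depth passes the limit, a debug temporary is bound to
   the intermediate value right after its definition,

     a_1 = b_2 + c_3;
     # DEBUG D#1 => a_1

   so debug binds further down can refer to D#1 instead of re-expanding
   the whole TER chain.  */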
3745 /* Return an RTX equivalent to the value of the parameter DECL. */
3748 expand_debug_parm_decl (tree decl
)
3750 rtx incoming
= DECL_INCOMING_RTL (decl
);
3753 && GET_MODE (incoming
) != BLKmode
3754 && ((REG_P (incoming
) && HARD_REGISTER_P (incoming
))
3755 || (MEM_P (incoming
)
3756 && REG_P (XEXP (incoming
, 0))
3757 && HARD_REGISTER_P (XEXP (incoming
, 0)))))
3759 rtx rtl
= gen_rtx_ENTRY_VALUE (GET_MODE (incoming
));
3761 #ifdef HAVE_window_save
3762 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3763 If the target machine has an explicit window save instruction, the
3764 actual entry value is the corresponding OUTGOING_REGNO instead. */
3765 if (REG_P (incoming
)
3766 && OUTGOING_REGNO (REGNO (incoming
)) != REGNO (incoming
))
3768 = gen_rtx_REG_offset (incoming
, GET_MODE (incoming
),
3769 OUTGOING_REGNO (REGNO (incoming
)), 0);
3770 else if (MEM_P (incoming
))
3772 rtx reg
= XEXP (incoming
, 0);
3773 if (OUTGOING_REGNO (REGNO (reg
)) != REGNO (reg
))
3775 reg
= gen_raw_REG (GET_MODE (reg
), OUTGOING_REGNO (REGNO (reg
)));
3776 incoming
= replace_equiv_address_nv (incoming
, reg
);
3779 incoming
= copy_rtx (incoming
);
3783 ENTRY_VALUE_EXP (rtl
) = incoming
;
3788 && GET_MODE (incoming
) != BLKmode
3789 && !TREE_ADDRESSABLE (decl
)
3791 && (XEXP (incoming
, 0) == virtual_incoming_args_rtx
3792 || (GET_CODE (XEXP (incoming
, 0)) == PLUS
3793 && XEXP (XEXP (incoming
, 0), 0) == virtual_incoming_args_rtx
3794 && CONST_INT_P (XEXP (XEXP (incoming
, 0), 1)))))
3795 return copy_rtx (incoming
);
3800 /* Return an RTX equivalent to the value of the tree expression EXP. */
3803 expand_debug_expr (tree exp
)
3805 rtx op0
= NULL_RTX
, op1
= NULL_RTX
, op2
= NULL_RTX
;
3806 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
3807 machine_mode inner_mode
= VOIDmode
;
3808 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
3811 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
3813 case tcc_expression
:
3814 switch (TREE_CODE (exp
))
3819 case WIDEN_MULT_PLUS_EXPR
:
3820 case WIDEN_MULT_MINUS_EXPR
:
3824 case TRUTH_ANDIF_EXPR
:
3825 case TRUTH_ORIF_EXPR
:
3826 case TRUTH_AND_EXPR
:
3828 case TRUTH_XOR_EXPR
:
3831 case TRUTH_NOT_EXPR
:
3840 op2
= expand_debug_expr (TREE_OPERAND (exp
, 2));
3847 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
3850 switch (TREE_CODE (exp
))
3856 case WIDEN_LSHIFT_EXPR
:
3857 /* Ensure second operand isn't wider than the first one. */
3858 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
3859 if (SCALAR_INT_MODE_P (inner_mode
))
3861 machine_mode opmode
= mode
;
3862 if (VECTOR_MODE_P (mode
))
3863 opmode
= GET_MODE_INNER (mode
);
3864 if (SCALAR_INT_MODE_P (opmode
)
3865 && (GET_MODE_PRECISION (opmode
)
3866 < GET_MODE_PRECISION (inner_mode
)))
3867 op1
= simplify_gen_subreg (opmode
, op1
, inner_mode
,
3868 subreg_lowpart_offset (opmode
,
3879 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
3880 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
3885 case tcc_comparison
:
3886 unsignedp
= TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
3894 case tcc_exceptional
:
3895 case tcc_declaration
:
3901 switch (TREE_CODE (exp
))
3904 if (!lookup_constant_def (exp
))
3906 if (strlen (TREE_STRING_POINTER (exp
)) + 1
3907 != (size_t) TREE_STRING_LENGTH (exp
))
3909 op0
= gen_rtx_CONST_STRING (Pmode
, TREE_STRING_POINTER (exp
));
3910 op0
= gen_rtx_MEM (BLKmode
, op0
);
3911 set_mem_attributes (op0
, exp
, 0);
3914 /* Fall through... */
3919 op0
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_INITIALIZER
);
3923 gcc_assert (COMPLEX_MODE_P (mode
));
3924 op0
= expand_debug_expr (TREE_REALPART (exp
));
3925 op1
= expand_debug_expr (TREE_IMAGPART (exp
));
3926 return gen_rtx_CONCAT (mode
, op0
, op1
);
3928 case DEBUG_EXPR_DECL
:
3929 op0
= DECL_RTL_IF_SET (exp
);
3934 op0
= gen_rtx_DEBUG_EXPR (mode
);
3935 DEBUG_EXPR_TREE_DECL (op0
) = exp
;
3936 SET_DECL_RTL (exp
, op0
);
3946 op0
= DECL_RTL_IF_SET (exp
);
3948 /* This decl was probably optimized away. */
3951 if (TREE_CODE (exp
) != VAR_DECL
3952 || DECL_EXTERNAL (exp
)
3953 || !TREE_STATIC (exp
)
3955 || DECL_HARD_REGISTER (exp
)
3956 || DECL_IN_CONSTANT_POOL (exp
)
3957 || mode
== VOIDmode
)
3960 op0
= make_decl_rtl_for_debug (exp
);
3962 || GET_CODE (XEXP (op0
, 0)) != SYMBOL_REF
3963 || SYMBOL_REF_DECL (XEXP (op0
, 0)) != exp
)
3967 op0
= copy_rtx (op0
);
3969 if (GET_MODE (op0
) == BLKmode
3970 /* If op0 is not BLKmode, but mode is, adjust_mode
3971 below would ICE. While it is likely a FE bug,
3972 try to be robust here. See PR43166. */
3974 || (mode
== VOIDmode
&& GET_MODE (op0
) != VOIDmode
))
3976 gcc_assert (MEM_P (op0
));
3977 op0
= adjust_address_nv (op0
, mode
, 0);
3987 inner_mode
= GET_MODE (op0
);
3989 if (mode
== inner_mode
)
3992 if (inner_mode
== VOIDmode
)
3994 if (TREE_CODE (exp
) == SSA_NAME
)
3995 inner_mode
= TYPE_MODE (TREE_TYPE (exp
));
3997 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
3998 if (mode
== inner_mode
)
4002 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
4004 if (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (inner_mode
))
4005 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
4006 else if (GET_MODE_BITSIZE (mode
) < GET_MODE_BITSIZE (inner_mode
))
4007 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
4009 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
4011 else if (FLOAT_MODE_P (mode
))
4013 gcc_assert (TREE_CODE (exp
) != SSA_NAME
);
4014 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
4015 op0
= simplify_gen_unary (UNSIGNED_FLOAT
, mode
, op0
, inner_mode
);
4017 op0
= simplify_gen_unary (FLOAT
, mode
, op0
, inner_mode
);
4019 else if (FLOAT_MODE_P (inner_mode
))
4022 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
4024 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
4026 else if (CONSTANT_P (op0
)
4027 || GET_MODE_PRECISION (mode
) <= GET_MODE_PRECISION (inner_mode
))
4028 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
4029 subreg_lowpart_offset (mode
,
4031 else if (UNARY_CLASS_P (exp
)
4032 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4034 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
4036 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
4042 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
4044 tree newexp
= fold_binary (MEM_REF
, TREE_TYPE (exp
),
4045 TREE_OPERAND (exp
, 0),
4046 TREE_OPERAND (exp
, 1));
4048 return expand_debug_expr (newexp
);
4052 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4053 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4057 if (TREE_CODE (exp
) == MEM_REF
)
4059 if (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
4060 || (GET_CODE (op0
) == PLUS
4061 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
))
4062 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4063 Instead just use get_inner_reference. */
4066 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
4067 if (!op1
|| !CONST_INT_P (op1
))
4070 op0
= plus_constant (inner_mode
, op0
, INTVAL (op1
));
4073 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4075 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
4077 if (op0
== NULL_RTX
)
4080 op0
= gen_rtx_MEM (mode
, op0
);
4081 set_mem_attributes (op0
, exp
, 0);
4082 if (TREE_CODE (exp
) == MEM_REF
4083 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
4084 set_mem_expr (op0
, NULL_TREE
);
4085 set_mem_addr_space (op0
, as
);
4089 case TARGET_MEM_REF
:
4090 if (TREE_CODE (TMR_BASE (exp
)) == ADDR_EXPR
4091 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp
), 0)))
4094 op0
= expand_debug_expr
4095 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp
)), exp
));
4099 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4100 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
4102 if (op0
== NULL_RTX
)
4105 op0
= gen_rtx_MEM (mode
, op0
);
4107 set_mem_attributes (op0
, exp
, 0);
4108 set_mem_addr_space (op0
, as
);
4114 case ARRAY_RANGE_REF
:
4119 case VIEW_CONVERT_EXPR
:
4122 HOST_WIDE_INT bitsize
, bitpos
;
4125 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
4126 &mode1
, &unsignedp
, &volatilep
, false);
4132 orig_op0
= op0
= expand_debug_expr (tem
);
4139 machine_mode addrmode
, offmode
;
4144 op0
= XEXP (op0
, 0);
4145 addrmode
= GET_MODE (op0
);
4146 if (addrmode
== VOIDmode
)
4149 op1
= expand_debug_expr (offset
);
4153 offmode
= GET_MODE (op1
);
4154 if (offmode
== VOIDmode
)
4155 offmode
= TYPE_MODE (TREE_TYPE (offset
));
4157 if (addrmode
!= offmode
)
4158 op1
= simplify_gen_subreg (addrmode
, op1
, offmode
,
4159 subreg_lowpart_offset (addrmode
,
4162 /* Don't use offset_address here, we don't need a
4163 recognizable address, and we don't want to generate
4165 op0
= gen_rtx_MEM (mode
, simplify_gen_binary (PLUS
, addrmode
,
4171 if (mode1
== VOIDmode
)
4173 mode1
= smallest_mode_for_size (bitsize
, MODE_INT
);
4174 if (bitpos
>= BITS_PER_UNIT
)
4176 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
4177 bitpos
%= BITS_PER_UNIT
;
4179 else if (bitpos
< 0)
4182 = (-bitpos
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
;
4183 op0
= adjust_address_nv (op0
, mode1
, units
);
4184 bitpos
+= units
* BITS_PER_UNIT
;
4186 else if (bitpos
== 0 && bitsize
== GET_MODE_BITSIZE (mode
))
4187 op0
= adjust_address_nv (op0
, mode
, 0);
4188 else if (GET_MODE (op0
) != mode1
)
4189 op0
= adjust_address_nv (op0
, mode1
, 0);
4191 op0
= copy_rtx (op0
);
4192 if (op0
== orig_op0
)
4193 op0
= shallow_copy_rtx (op0
);
4194 set_mem_attributes (op0
, exp
, 0);
4197 if (bitpos
== 0 && mode
== GET_MODE (op0
))
4203 if (GET_MODE (op0
) == BLKmode
)
4206 if ((bitpos
% BITS_PER_UNIT
) == 0
4207 && bitsize
== GET_MODE_BITSIZE (mode1
))
4209 machine_mode opmode
= GET_MODE (op0
);
4211 if (opmode
== VOIDmode
)
4212 opmode
= TYPE_MODE (TREE_TYPE (tem
));
4214 /* This condition may hold if we're expanding the address
4215 right past the end of an array that turned out not to
4216 be addressable (i.e., the address was only computed in
4217 debug stmts). The gen_subreg below would rightfully
4218 crash, and the address doesn't really exist, so just
4220 if (bitpos
>= GET_MODE_BITSIZE (opmode
))
4223 if ((bitpos
% GET_MODE_BITSIZE (mode
)) == 0)
4224 return simplify_gen_subreg (mode
, op0
, opmode
,
4225 bitpos
/ BITS_PER_UNIT
);
4228 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0
))
4229 && TYPE_UNSIGNED (TREE_TYPE (exp
))
4231 : ZERO_EXTRACT
, mode
,
4232 GET_MODE (op0
) != VOIDmode
4234 : TYPE_MODE (TREE_TYPE (tem
)),
4235 op0
, GEN_INT (bitsize
), GEN_INT (bitpos
));
    case ABS_EXPR:
      return simplify_gen_unary (ABS, mode, op0, mode);

    case NEGATE_EXPR:
      return simplify_gen_unary (NEG, mode, op0, mode);

    case BIT_NOT_EXPR:
      return simplify_gen_unary (NOT, mode, op0, mode);

    case FLOAT_EXPR:
      return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
				 inner_mode);

    case FIX_TRUNC_EXPR:
      return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
				 inner_mode);

    case POINTER_PLUS_EXPR:
      /* For the rare target where pointers are not the same size as
	 size_t, we need to check for mis-matched modes and correct
	 the addend.  */
      if (op0 && op1
	  && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
	  && GET_MODE (op0) != GET_MODE (op1))
	{
	  if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
	      /* If OP0 is a partial mode, then we must truncate, even if it has
		 the same bitsize as OP1 as GCC's representation of partial modes
		 is opaque.  */
	      || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
		  && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
	    op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
				      GET_MODE (op1));
	  else
	    /* We always sign-extend, regardless of the signedness of
	       the operand, because the operand is always unsigned
	       here even if the original C expression is signed.  */
	    op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
				      GET_MODE (op1));
	}
      /* Fall through.  */
    case PLUS_EXPR:
      return simplify_gen_binary (PLUS, mode, op0, op1);

    case MINUS_EXPR:
      return simplify_gen_binary (MINUS, mode, op0, op1);

    case MULT_EXPR:
      return simplify_gen_binary (MULT, mode, op0, op1);

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (unsignedp)
	return simplify_gen_binary (UDIV, mode, op0, op1);
      else
	return simplify_gen_binary (DIV, mode, op0, op1);

    case TRUNC_MOD_EXPR:
      return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4301 case FLOOR_DIV_EXPR
:
4303 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4306 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4307 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4308 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
4309 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4312 case FLOOR_MOD_EXPR
:
4314 return simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4317 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4318 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
4319 adj
= simplify_gen_unary (NEG
, mode
,
4320 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4322 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4328 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4329 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4330 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
4331 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4335 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4336 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4337 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
4338 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4344 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4345 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
4346 adj
= simplify_gen_unary (NEG
, mode
,
4347 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4349 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4353 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4354 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
4355 adj
= simplify_gen_unary (NEG
, mode
,
4356 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4358 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4361 case ROUND_DIV_EXPR
:
4364 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4365 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4366 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
4367 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4371 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4372 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4373 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
4374 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4377 case ROUND_MOD_EXPR
:
4380 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4381 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
4382 adj
= simplify_gen_unary (NEG
, mode
,
4383 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4385 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4389 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4390 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
4391 adj
= simplify_gen_unary (NEG
, mode
,
4392 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4394 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
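      /* Note on the FLOOR_*, CEIL_* and ROUND_* cases above: each one is
	 expanded as a truncating DIV/MOD (or UDIV/UMOD) followed by a
	 correction term from the *_adjust helpers.  E.g. for signed
	 FLOOR_DIV_EXPR the value is
	   DIV (op0, op1) + floor_sdiv_adjust (mode, MOD (op0, op1), op1)
	 where the adjustment is -1 exactly when the remainder is nonzero
	 and the operands have opposite signs, and 0 otherwise; the
	 *_MOD_EXPR variants instead correct the remainder by subtracting
	 adj * op1.  */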
4398 return simplify_gen_binary (ASHIFT
, mode
, op0
, op1
);
4402 return simplify_gen_binary (LSHIFTRT
, mode
, op0
, op1
);
4404 return simplify_gen_binary (ASHIFTRT
, mode
, op0
, op1
);
4407 return simplify_gen_binary (ROTATE
, mode
, op0
, op1
);
4410 return simplify_gen_binary (ROTATERT
, mode
, op0
, op1
);
4413 return simplify_gen_binary (unsignedp
? UMIN
: SMIN
, mode
, op0
, op1
);
4416 return simplify_gen_binary (unsignedp
? UMAX
: SMAX
, mode
, op0
, op1
);
4419 case TRUTH_AND_EXPR
:
4420 return simplify_gen_binary (AND
, mode
, op0
, op1
);
4424 return simplify_gen_binary (IOR
, mode
, op0
, op1
);
4427 case TRUTH_XOR_EXPR
:
4428 return simplify_gen_binary (XOR
, mode
, op0
, op1
);
4430 case TRUTH_ANDIF_EXPR
:
4431 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, const0_rtx
);
4433 case TRUTH_ORIF_EXPR
:
4434 return gen_rtx_IF_THEN_ELSE (mode
, op0
, const_true_rtx
, op1
);
4436 case TRUTH_NOT_EXPR
:
4437 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, const0_rtx
);
4440 return simplify_gen_relational (unsignedp
? LTU
: LT
, mode
, inner_mode
,
4444 return simplify_gen_relational (unsignedp
? LEU
: LE
, mode
, inner_mode
,
4448 return simplify_gen_relational (unsignedp
? GTU
: GT
, mode
, inner_mode
,
4452 return simplify_gen_relational (unsignedp
? GEU
: GE
, mode
, inner_mode
,
4456 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, op1
);
4459 return simplify_gen_relational (NE
, mode
, inner_mode
, op0
, op1
);
4461 case UNORDERED_EXPR
:
4462 return simplify_gen_relational (UNORDERED
, mode
, inner_mode
, op0
, op1
);
4465 return simplify_gen_relational (ORDERED
, mode
, inner_mode
, op0
, op1
);
4468 return simplify_gen_relational (UNLT
, mode
, inner_mode
, op0
, op1
);
4471 return simplify_gen_relational (UNLE
, mode
, inner_mode
, op0
, op1
);
4474 return simplify_gen_relational (UNGT
, mode
, inner_mode
, op0
, op1
);
4477 return simplify_gen_relational (UNGE
, mode
, inner_mode
, op0
, op1
);
4480 return simplify_gen_relational (UNEQ
, mode
, inner_mode
, op0
, op1
);
4483 return simplify_gen_relational (LTGT
, mode
, inner_mode
, op0
, op1
);
4486 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, op2
);
4489 gcc_assert (COMPLEX_MODE_P (mode
));
4490 if (GET_MODE (op0
) == VOIDmode
)
4491 op0
= gen_rtx_CONST (GET_MODE_INNER (mode
), op0
);
4492 if (GET_MODE (op1
) == VOIDmode
)
4493 op1
= gen_rtx_CONST (GET_MODE_INNER (mode
), op1
);
4494 return gen_rtx_CONCAT (mode
, op0
, op1
);
4497 if (GET_CODE (op0
) == CONCAT
)
4498 return gen_rtx_CONCAT (mode
, XEXP (op0
, 0),
4499 simplify_gen_unary (NEG
, GET_MODE_INNER (mode
),
4501 GET_MODE_INNER (mode
)));
4504 machine_mode imode
= GET_MODE_INNER (mode
);
4509 re
= adjust_address_nv (op0
, imode
, 0);
4510 im
= adjust_address_nv (op0
, imode
, GET_MODE_SIZE (imode
));
4514 machine_mode ifmode
= int_mode_for_mode (mode
);
4515 machine_mode ihmode
= int_mode_for_mode (imode
);
4517 if (ifmode
== BLKmode
|| ihmode
== BLKmode
)
4519 halfsize
= GEN_INT (GET_MODE_BITSIZE (ihmode
));
4522 re
= gen_rtx_SUBREG (ifmode
, re
, 0);
4523 re
= gen_rtx_ZERO_EXTRACT (ihmode
, re
, halfsize
, const0_rtx
);
4524 if (imode
!= ihmode
)
4525 re
= gen_rtx_SUBREG (imode
, re
, 0);
4526 im
= copy_rtx (op0
);
4528 im
= gen_rtx_SUBREG (ifmode
, im
, 0);
4529 im
= gen_rtx_ZERO_EXTRACT (ihmode
, im
, halfsize
, halfsize
);
4530 if (imode
!= ihmode
)
4531 im
= gen_rtx_SUBREG (imode
, im
, 0);
4533 im
= gen_rtx_NEG (imode
, im
);
4534 return gen_rtx_CONCAT (mode
, re
, im
);
4538 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4539 if (!op0
|| !MEM_P (op0
))
4541 if ((TREE_CODE (TREE_OPERAND (exp
, 0)) == VAR_DECL
4542 || TREE_CODE (TREE_OPERAND (exp
, 0)) == PARM_DECL
4543 || TREE_CODE (TREE_OPERAND (exp
, 0)) == RESULT_DECL
)
4544 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp
, 0))
4545 || target_for_debug_bind (TREE_OPERAND (exp
, 0))))
4546 return gen_rtx_DEBUG_IMPLICIT_PTR (mode
, TREE_OPERAND (exp
, 0));
4548 if (handled_component_p (TREE_OPERAND (exp
, 0)))
4550 HOST_WIDE_INT bitoffset
, bitsize
, maxsize
;
4552 = get_ref_base_and_extent (TREE_OPERAND (exp
, 0),
4553 &bitoffset
, &bitsize
, &maxsize
);
4554 if ((TREE_CODE (decl
) == VAR_DECL
4555 || TREE_CODE (decl
) == PARM_DECL
4556 || TREE_CODE (decl
) == RESULT_DECL
)
4557 && (!TREE_ADDRESSABLE (decl
)
4558 || target_for_debug_bind (decl
))
4559 && (bitoffset
% BITS_PER_UNIT
) == 0
4561 && bitsize
== maxsize
)
4563 rtx base
= gen_rtx_DEBUG_IMPLICIT_PTR (mode
, decl
);
4564 return plus_constant (mode
, base
, bitoffset
/ BITS_PER_UNIT
);
4568 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == MEM_REF
4569 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
4572 op0
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
4575 && (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
4576 || (GET_CODE (op0
) == PLUS
4577 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
4578 && CONST_INT_P (XEXP (op0
, 1)))))
4580 op1
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
4582 if (!op1
|| !CONST_INT_P (op1
))
4585 return plus_constant (mode
, op0
, INTVAL (op1
));
4592 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
4593 op0
= convert_debug_memory_address (mode
, XEXP (op0
, 0), as
);
4601 op0
= gen_rtx_CONCATN
4602 (mode
, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
))));
4604 for (i
= 0; i
< VECTOR_CST_NELTS (exp
); ++i
)
4606 op1
= expand_debug_expr (VECTOR_CST_ELT (exp
, i
));
4609 XVECEXP (op0
, 0, i
) = op1
;
4616 if (TREE_CLOBBER_P (exp
))
4618 else if (TREE_CODE (TREE_TYPE (exp
)) == VECTOR_TYPE
)
4623 op0
= gen_rtx_CONCATN
4624 (mode
, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
))));
4626 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), i
, val
)
4628 op1
= expand_debug_expr (val
);
4631 XVECEXP (op0
, 0, i
) = op1
;
4634 if (i
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)))
4636 op1
= expand_debug_expr
4637 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp
))));
4642 for (; i
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)); i
++)
4643 XVECEXP (op0
, 0, i
) = op1
;
4649 goto flag_unsupported
;
4652 /* ??? Maybe handle some builtins? */
4657 gimple g
= get_gimple_for_ssa_name (exp
);
4661 if (deep_ter_debug_map
)
4663 tree
*slot
= deep_ter_debug_map
->get (exp
);
4668 t
= gimple_assign_rhs_to_tree (g
);
4669 op0
= expand_debug_expr (t
);
4675 int part
= var_to_partition (SA
.map
, exp
);
4677 if (part
== NO_PARTITION
)
4679 /* If this is a reference to an incoming value of parameter
4680 that is never used in the code or where the incoming
4681 value is never used in the code, use PARM_DECL's
4683 if (SSA_NAME_IS_DEFAULT_DEF (exp
)
4684 && TREE_CODE (SSA_NAME_VAR (exp
)) == PARM_DECL
)
4686 op0
= expand_debug_parm_decl (SSA_NAME_VAR (exp
));
4689 op0
= expand_debug_expr (SSA_NAME_VAR (exp
));
4696 gcc_assert (part
>= 0 && (unsigned)part
< SA
.map
->num_partitions
);
4698 op0
= copy_rtx (SA
.partition_to_pseudo
[part
]);
      /* Vector stuff.  For most of the codes we don't have rtl codes.  */
    case REALIGN_LOAD_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return NULL;

    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case WITH_SIZE_EXPR:
      return NULL;
4736 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
4737 && SCALAR_INT_MODE_P (mode
))
4740 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
4742 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
4745 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
4747 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op1
,
4749 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
4750 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
4754 case WIDEN_MULT_EXPR
:
4755 case WIDEN_MULT_PLUS_EXPR
:
4756 case WIDEN_MULT_MINUS_EXPR
:
4757 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
4758 && SCALAR_INT_MODE_P (mode
))
4760 inner_mode
= GET_MODE (op0
);
4761 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
4762 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
4764 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
4765 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1))))
4766 op1
= simplify_gen_unary (ZERO_EXTEND
, mode
, op1
, inner_mode
);
4768 op1
= simplify_gen_unary (SIGN_EXTEND
, mode
, op1
, inner_mode
);
4769 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
4770 if (TREE_CODE (exp
) == WIDEN_MULT_EXPR
)
4772 else if (TREE_CODE (exp
) == WIDEN_MULT_PLUS_EXPR
)
4773 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
4775 return simplify_gen_binary (MINUS
, mode
, op2
, op0
);
4779 case MULT_HIGHPART_EXPR
:
4780 /* ??? Similar to the above. */
4783 case WIDEN_SUM_EXPR
:
4784 case WIDEN_LSHIFT_EXPR
:
4785 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
4786 && SCALAR_INT_MODE_P (mode
))
4789 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
4791 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
4793 return simplify_gen_binary (TREE_CODE (exp
) == WIDEN_LSHIFT_EXPR
4794 ? ASHIFT
: PLUS
, mode
, op0
, op1
);
4799 return simplify_gen_ternary (FMA
, mode
, inner_mode
, op0
, op1
, op2
);
4803 #ifdef ENABLE_CHECKING
/* Return an RTX equivalent to the source bind value of the tree expression
   EXP.  */

static rtx
expand_debug_source_expr (tree exp)
{
  rtx op0 = NULL_RTX;
  machine_mode mode = VOIDmode, inner_mode;

  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
      mode = DECL_MODE (exp);
      op0 = expand_debug_parm_decl (exp);
      if (op0)
	break;
      /* See if this isn't an argument that has been completely
	 optimized out.  */
      if (!DECL_RTL_SET_P (exp)
	  && !DECL_INCOMING_RTL (exp)
	  && DECL_ABSTRACT_ORIGIN (current_function_decl))
	{
	  tree aexp = DECL_ORIGIN (exp);
	  if (DECL_CONTEXT (aexp)
	      == DECL_ABSTRACT_ORIGIN (current_function_decl))
	    {
	      vec<tree, va_gc> **debug_args;
	      unsigned int ix;
	      tree ddecl;
	      debug_args = decl_debug_args_lookup (current_function_decl);
	      if (debug_args != NULL)
		{
		  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
		       ix += 2)
		    if (ddecl == aexp)
		      return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
		}
	    }
	}
      break;
    default:
      break;
    }

  if (op0 == NULL_RTX)
    return NULL_RTX;

  inner_mode = GET_MODE (op0);
  if (mode == inner_mode)
    return op0;

  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
    {
      if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
      else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
      else
	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
    }
  else if (FLOAT_MODE_P (mode))
    return NULL_RTX;
  else if (FLOAT_MODE_P (inner_mode))
    {
      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
      else
	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
    }
  else if (CONSTANT_P (op0)
	   || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
    op0 = simplify_gen_subreg (mode, op0, inner_mode,
			       subreg_lowpart_offset (mode, inner_mode));
  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
  else
    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

  return op0;
}
/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */

static void
avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
{
  rtx exp = *exp_p;

  if (exp == NULL_RTX)
    return;

  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
    return;

  if (depth == 4)
    {
      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
      rtx dval = make_debug_expr_from_rtl (exp);

      /* Emit a debug bind insn before INSN.  */
      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
				       DEBUG_EXPR_TREE_DECL (dval), exp,
				       VAR_INIT_STATUS_INITIALIZED);

      emit_debug_insn_before (bind, insn);
      *exp_p = dval;
      return;
    }

  const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
  int i, j;
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
	break;

      case 'E':
      case 'V':
	for (j = 0; j < XVECLEN (exp, i); j++)
	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
	break;

      default:
	break;
      }
}
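/* For example, a deeply nested location expression such as
     (plus (mult (mult (reg A) (reg B)) (reg C)) (reg D))
   gets its innermost subexpression replaced by a DEBUG_EXPR, with a
   separate DEBUG_INSN binding that DEBUG_EXPR to the replaced RTL, so
   every remaining location expression stays within the nesting limit.  */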
/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though it doesn't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_INSN_P (insn))
      {
	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
	rtx val;
	rtx_insn *prev_insn, *insn2;
	machine_mode mode;

	if (value == NULL_TREE)
	  val = NULL_RTX;
	else
	  {
	    if (INSN_VAR_LOCATION_STATUS (insn)
		== VAR_INIT_STATUS_UNINITIALIZED)
	      val = expand_debug_source_expr (value);
	    /* The avoid_deep_ter_for_debug function inserts
	       debug bind stmts after SSA_NAME definition, with the
	       SSA_NAME as the whole bind location.  Disable temporarily
	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
	       being defined in this DEBUG_INSN.  */
	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
	      {
		tree *slot = deep_ter_debug_map->get (value);
		if (slot)
		  {
		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
		      *slot = NULL_TREE;
		    else
		      slot = NULL;
		  }
		val = expand_debug_expr (value);
		if (slot)
		  *slot = INSN_VAR_LOCATION_DECL (insn);
	      }
	    else
	      val = expand_debug_expr (value);
	    gcc_assert (last == get_last_insn ());
	  }

	if (!val)
	  val = gen_rtx_UNKNOWN_VAR_LOC ();
	else
	  {
	    mode = GET_MODE (INSN_VAR_LOCATION (insn));

	    gcc_assert (mode == GET_MODE (val)
			|| (GET_MODE (val) == VOIDmode
			    && (CONST_SCALAR_INT_P (val)
				|| GET_CODE (val) == CONST_FIXED
				|| GET_CODE (val) == LABEL_REF)));
	  }

	INSN_VAR_LOCATION_LOC (insn) = val;
	prev_insn = PREV_INSN (insn);
	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
      }

  flag_strict_aliasing = save_strict_alias;
}
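/* Note that the inner loop above walks every insn emitted while expanding a
   location, not just the original DEBUG_INSN, so DEBUG_EXPR bindings that
   avoid_complex_debug_insns creates are themselves re-checked and kept
   within the nesting limit.  */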
/* Performs swapping operands of commutative operations to expand
   the expensive one first.  */

static void
reorder_operands (basic_block bb)
{
  unsigned int *lattice;  /* Hold cost of each statement.  */
  unsigned int i = 0, n = 0;
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple stmt;
  bool swap;
  tree op0, op1;
  ssa_op_iter iter;
  use_operand_p use_p;
  gimple def0, def1;

  /* Compute cost of each statement using estimate_num_insns.  */
  stmts = bb_seq (bb);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (!is_gimple_debug (stmt))
	gimple_set_uid (stmt, n++);
    }
  lattice = XNEWVEC (unsigned int, n);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      unsigned cost;
      stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;
      cost = estimate_num_insns (stmt, &eni_size_weights);
      lattice[i] = cost;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	{
	  tree use = USE_FROM_PTR (use_p);
	  gimple def_stmt;
	  if (TREE_CODE (use) != SSA_NAME)
	    continue;
	  def_stmt = get_gimple_for_ssa_name (use);
	  if (!def_stmt)
	    continue;
	  lattice[i] += lattice[gimple_uid (def_stmt)];
	}
      i++;
      if (!is_gimple_assign (stmt)
	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
	continue;
      op0 = gimple_op (stmt, 1);
      op1 = gimple_op (stmt, 2);
      if (TREE_CODE (op0) != SSA_NAME
	  || TREE_CODE (op1) != SSA_NAME)
	continue;
      /* Swap operands if the second one is more expensive.  */
      def0 = get_gimple_for_ssa_name (op0);
      def1 = get_gimple_for_ssa_name (op1);
      if (!def1)
	continue;
      swap = false;
      if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
	swap = true;
      if (swap)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Swap operands in stmt:\n");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
		       def0 ? lattice[gimple_uid (def0)] : 0,
		       lattice[gimple_uid (def1)]);
	    }
	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
			     gimple_assign_rhs2_ptr (stmt));
	}
    }
  XDELETE (lattice);
}
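/* As an illustration: for
     t1 = a * b;
     t2 = c + t1;
   where both C and T1 are SSA names and T1's definition is TERed, the
   operands of the addition are swapped so that the more expensive
   multiplication becomes the first operand and is therefore expanded
   first.  */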
5101 /* Expand basic block BB from GIMPLE trees to RTL. */
5104 expand_gimple_basic_block (basic_block bb
, bool disable_tail_calls
)
5106 gimple_stmt_iterator gsi
;
5115 fprintf (dump_file
, "\n;; Generating RTL for gimple basic block %d\n",
5118 /* Note that since we are now transitioning from GIMPLE to RTL, we
5119 cannot use the gsi_*_bb() routines because they expect the basic
5120 block to be in GIMPLE, instead of RTL. Therefore, we need to
5121 access the BB sequence directly. */
5123 reorder_operands (bb
);
5124 stmts
= bb_seq (bb
);
5125 bb
->il
.gimple
.seq
= NULL
;
5126 bb
->il
.gimple
.phi_nodes
= NULL
;
5127 rtl_profile_for_bb (bb
);
5128 init_rtl_bb_info (bb
);
5129 bb
->flags
|= BB_RTL
;
5131 /* Remove the RETURN_EXPR if we may fall though to the exit
5133 gsi
= gsi_last (stmts
);
5134 if (!gsi_end_p (gsi
)
5135 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
5137 greturn
*ret_stmt
= as_a
<greturn
*> (gsi_stmt (gsi
));
5139 gcc_assert (single_succ_p (bb
));
5140 gcc_assert (single_succ (bb
) == EXIT_BLOCK_PTR_FOR_FN (cfun
));
5142 if (bb
->next_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
5143 && !gimple_return_retval (ret_stmt
))
5145 gsi_remove (&gsi
, false);
5146 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
5150 gsi
= gsi_start (stmts
);
5151 if (!gsi_end_p (gsi
))
5153 stmt
= gsi_stmt (gsi
);
5154 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5158 rtx_code_label
**elt
= lab_rtx_for_bb
->get (bb
);
5162 last
= get_last_insn ();
5166 expand_gimple_stmt (stmt
);
5173 /* Java emits line number notes in the top of labels.
5174 ??? Make this go away once line number notes are obsoleted. */
5175 BB_HEAD (bb
) = NEXT_INSN (last
);
5176 if (NOTE_P (BB_HEAD (bb
)))
5177 BB_HEAD (bb
) = NEXT_INSN (BB_HEAD (bb
));
5178 note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, BB_HEAD (bb
));
5180 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5183 BB_HEAD (bb
) = note
= emit_note (NOTE_INSN_BASIC_BLOCK
);
5185 NOTE_BASIC_BLOCK (note
) = bb
;
5187 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5191 stmt
= gsi_stmt (gsi
);
5193 /* If this statement is a non-debug one, and we generate debug
5194 insns, then this one might be the last real use of a TERed
5195 SSA_NAME, but where there are still some debug uses further
5196 down. Expanding the current SSA name in such further debug
5197 uses by their RHS might lead to wrong debug info, as coalescing
5198 might make the operands of such RHS be placed into the same
5199 pseudo as something else. Like so:
5200 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5204 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
   If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
5206 the write to a_2 would actually have clobbered the place which
5209 So, instead of that, we recognize the situation, and generate
5210 debug temporaries at the last real use of TERed SSA names:
5217 if (MAY_HAVE_DEBUG_INSNS
5219 && !is_gimple_debug (stmt
))
5225 location_t sloc
= curr_insn_location ();
5227 /* Look for SSA names that have their last use here (TERed
5228 names always have only one real use). */
5229 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
5230 if ((def
= get_gimple_for_ssa_name (op
)))
5232 imm_use_iterator imm_iter
;
5233 use_operand_p use_p
;
5234 bool have_debug_uses
= false;
5236 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, op
)
5238 if (gimple_debug_bind_p (USE_STMT (use_p
)))
5240 have_debug_uses
= true;
5245 if (have_debug_uses
)
5247 /* OP is a TERed SSA name, with DEF its defining
5248 statement, and where OP is used in further debug
5249 instructions. Generate a debug temporary, and
5250 replace all uses of OP in debug insns with that
5253 tree value
= gimple_assign_rhs_to_tree (def
);
5254 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
5258 set_curr_insn_location (gimple_location (def
));
5260 DECL_ARTIFICIAL (vexpr
) = 1;
5261 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
5263 mode
= DECL_MODE (value
);
5265 mode
= TYPE_MODE (TREE_TYPE (value
));
5266 DECL_MODE (vexpr
) = mode
;
5268 val
= gen_rtx_VAR_LOCATION
5269 (mode
, vexpr
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
5271 emit_debug_insn (val
);
5273 FOR_EACH_IMM_USE_STMT (debugstmt
, imm_iter
, op
)
5275 if (!gimple_debug_bind_p (debugstmt
))
5278 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
5279 SET_USE (use_p
, vexpr
);
5281 update_stmt (debugstmt
);
5285 set_curr_insn_location (sloc
);
5288 currently_expanding_gimple_stmt
= stmt
;
5290 /* Expand this statement, then evaluate the resulting RTL and
5291 fixup the CFG accordingly. */
5292 if (gimple_code (stmt
) == GIMPLE_COND
)
5294 new_bb
= expand_gimple_cond (bb
, as_a
<gcond
*> (stmt
));
5298 else if (gimple_debug_bind_p (stmt
))
5300 location_t sloc
= curr_insn_location ();
5301 gimple_stmt_iterator nsi
= gsi
;
5305 tree var
= gimple_debug_bind_get_var (stmt
);
5310 if (TREE_CODE (var
) != DEBUG_EXPR_DECL
5311 && TREE_CODE (var
) != LABEL_DECL
5312 && !target_for_debug_bind (var
))
5313 goto delink_debug_stmt
;
5315 if (gimple_debug_bind_has_value_p (stmt
))
5316 value
= gimple_debug_bind_get_value (stmt
);
5320 last
= get_last_insn ();
5322 set_curr_insn_location (gimple_location (stmt
));
5325 mode
= DECL_MODE (var
);
5327 mode
= TYPE_MODE (TREE_TYPE (var
));
5329 val
= gen_rtx_VAR_LOCATION
5330 (mode
, var
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
5332 emit_debug_insn (val
);
5334 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5336 /* We can't dump the insn with a TREE where an RTX
5338 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
5339 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5340 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
5344 /* In order not to generate too many debug temporaries,
5345 we delink all uses of debug statements we already expanded.
5346 Therefore debug statements between definition and real
5347 use of TERed SSA names will continue to use the SSA name,
5348 and not be replaced with debug temps. */
5349 delink_stmt_imm_use (stmt
);
5353 if (gsi_end_p (nsi
))
5355 stmt
= gsi_stmt (nsi
);
5356 if (!gimple_debug_bind_p (stmt
))
5360 set_curr_insn_location (sloc
);
5362 else if (gimple_debug_source_bind_p (stmt
))
5364 location_t sloc
= curr_insn_location ();
5365 tree var
= gimple_debug_source_bind_get_var (stmt
);
5366 tree value
= gimple_debug_source_bind_get_value (stmt
);
5370 last
= get_last_insn ();
5372 set_curr_insn_location (gimple_location (stmt
));
5374 mode
= DECL_MODE (var
);
5376 val
= gen_rtx_VAR_LOCATION (mode
, var
, (rtx
)value
,
5377 VAR_INIT_STATUS_UNINITIALIZED
);
5379 emit_debug_insn (val
);
5381 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5383 /* We can't dump the insn with a TREE where an RTX
5385 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
5386 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5387 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
5390 set_curr_insn_location (sloc
);
5394 gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
);
5396 && gimple_call_tail_p (call_stmt
)
5397 && disable_tail_calls
)
5398 gimple_call_set_tail (call_stmt
, false);
5400 if (call_stmt
&& gimple_call_tail_p (call_stmt
))
5403 new_bb
= expand_gimple_tailcall (bb
, call_stmt
, &can_fallthru
);
5414 def_operand_p def_p
;
5415 def_p
= SINGLE_SSA_DEF_OPERAND (stmt
, SSA_OP_DEF
);
5419 /* Ignore this stmt if it is in the list of
5420 replaceable expressions. */
5422 && bitmap_bit_p (SA
.values
,
5423 SSA_NAME_VERSION (DEF_FROM_PTR (def_p
))))
5426 last
= expand_gimple_stmt (stmt
);
5427 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5432 currently_expanding_gimple_stmt
= NULL
;
5434 /* Expand implicit goto and convert goto_locus. */
5435 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5437 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
5438 set_curr_insn_location (e
->goto_locus
);
5439 if ((e
->flags
& EDGE_FALLTHRU
) && e
->dest
!= bb
->next_bb
)
5441 emit_jump (label_rtx_for_bb (e
->dest
));
5442 e
->flags
&= ~EDGE_FALLTHRU
;
5446 /* Expanded RTL can create a jump in the last instruction of block.
5447 This later might be assumed to be a jump to successor and break edge insertion.
5448 We need to insert dummy move to prevent this. PR41440. */
5449 if (single_succ_p (bb
)
5450 && (single_succ_edge (bb
)->flags
& EDGE_FALLTHRU
)
5451 && (last
= get_last_insn ())
5454 rtx dummy
= gen_reg_rtx (SImode
);
5455 emit_insn_after_noloc (gen_move_insn (dummy
, dummy
), last
, NULL
);
5458 do_pending_stack_adjust ();
5460 /* Find the block tail. The last insn in the block is the insn
5461 before a barrier and/or table jump insn. */
5462 last
= get_last_insn ();
5463 if (BARRIER_P (last
))
5464 last
= PREV_INSN (last
);
5465 if (JUMP_TABLE_DATA_P (last
))
5466 last
= PREV_INSN (PREV_INSN (last
));
5469 update_bb_for_insn (bb
);
/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);

  /* When entry edge points to first basic block, we don't need jump,
     otherwise we have to jump into proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (jump_target_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR_FOR_FN (cfun));
  init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
  init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;

  update_bb_for_insn (init_block);
  return init_block;
}
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx_insn *head = get_last_insn ();
  rtx_insn *end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix = 0;
  edge_iterator ei;
  basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
  rtx_insn *orig_end = BB_END (prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();

  /* While emitting the function end we could move end of the last basic
     block.  */
  BB_END (prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  /* But make sure exit_block starts with RETURN_LABEL, otherwise the
     bb frequency counting will be confused.  Any instructions before that
     label are emitted for the case where PREV_BB falls through into the
     exit block, so append those instructions to prev_bb in that case.  */
  if (NEXT_INSN (head) != return_label)
    {
      while (NEXT_INSN (head) != return_label)
	{
	  if (!NOTE_P (NEXT_INSN (head)))
	    BB_END (prev_bb) = NEXT_INSN (head);
	  head = NEXT_INSN (head);
	}
    }
  exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
  exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);

  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e2 != e)
      {
	e->count -= e2->count;
	exit_block->count -= e2->count;
	exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t, 2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || CONVERT_EXPR_P (t))
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t)
	      && DECL_MODE (t) != BLKmode)
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple stmt = gsi_stmt (gsi);
	if (!is_gimple_debug (stmt))
	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}
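/* For instance, in
     int v[2];
     ...
     x = v[i];
   where V could otherwise live entirely in a register, the walk above sets
   TREE_ADDRESSABLE on V so that it is given a stack slot and the
   variable-index access can be expanded as a memory reference.  */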
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
	      <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
	 expanded.  */
      fixup_tail_calls ();
    }
}
static void
expand_main_function (void)
{
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}

/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

#ifndef HAVE_stack_protect_set
# define HAVE_stack_protect_set		0
# define gen_stack_protect_set(x,y)	(gcc_unreachable (), NULL_RTX)
#endif

static void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (HAVE_stack_protect_set)
    {
      rtx insn = gen_stack_protect_set (x, y);
      if (insn)
	{
	  emit_insn (insn);
	  return;
	}
    }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
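/* When the target provides no stack_protect_set pattern, the plain
   emit_move_insn fallback above may go through an intermediate register,
   which is why targets that care about not exposing the guard value in a
   register define the pattern.  */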
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

namespace {

const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lva ), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_expand
5840 pass_expand::execute (function
*fun
)
5842 basic_block bb
, init_block
;
5846 rtx_insn
*var_seq
, *var_ret_seq
;
5849 timevar_push (TV_OUT_OF_SSA
);
5850 rewrite_out_of_ssa (&SA
);
5851 timevar_pop (TV_OUT_OF_SSA
);
5852 SA
.partition_to_pseudo
= XCNEWVEC (rtx
, SA
.map
->num_partitions
);
5854 if (MAY_HAVE_DEBUG_STMTS
&& flag_tree_ter
)
5856 gimple_stmt_iterator gsi
;
5857 FOR_EACH_BB_FN (bb
, cfun
)
5858 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5859 if (gimple_debug_bind_p (gsi_stmt (gsi
)))
5860 avoid_deep_ter_for_debug (gsi_stmt (gsi
), 0);
5863 /* Make sure all values used by the optimization passes have sane
5867 /* Some backends want to know that we are expanding to RTL. */
5868 currently_expanding_to_rtl
= 1;
5869 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5870 free_dominance_info (CDI_DOMINATORS
);
5872 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
5874 if (chkp_function_instrumented_p (current_function_decl
))
5875 chkp_reset_rtl_bounds ();
5877 insn_locations_init ();
5878 if (!DECL_IS_BUILTIN (current_function_decl
))
5880 /* Eventually, all FEs should explicitly set function_start_locus. */
5881 if (LOCATION_LOCUS (fun
->function_start_locus
) == UNKNOWN_LOCATION
)
5882 set_curr_insn_location
5883 (DECL_SOURCE_LOCATION (current_function_decl
));
5885 set_curr_insn_location (fun
->function_start_locus
);
5888 set_curr_insn_location (UNKNOWN_LOCATION
);
5889 prologue_location
= curr_insn_location ();
5891 #ifdef INSN_SCHEDULING
5892 init_sched_attrs ();
5895 /* Make sure first insn is a note even if we don't want linenums.
5896 This makes sure the first insn will never be deleted.
5897 Also, final expects a note to appear there. */
5898 emit_note (NOTE_INSN_DELETED
);
5900 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5901 discover_nonconstant_array_refs ();
5903 targetm
.expand_to_rtl_hook ();
5904 crtl
->stack_alignment_needed
= STACK_BOUNDARY
;
5905 crtl
->max_used_stack_slot_alignment
= STACK_BOUNDARY
;
5906 crtl
->stack_alignment_estimated
= 0;
5907 crtl
->preferred_stack_boundary
= STACK_BOUNDARY
;
5908 fun
->cfg
->max_jumptable_ents
= 0;
  /* Resolve the function section.  Some targets, like ARM EABI rely on knowledge
     of the function section at expansion time to predict distance of calls.  */
5912 resolve_unique_section (current_function_decl
, 0, flag_function_sections
);
5914 /* Expand the variables recorded during gimple lowering. */
5915 timevar_push (TV_VAR_EXPAND
);
5918 var_ret_seq
= expand_used_vars ();
5920 var_seq
= get_insns ();
5922 timevar_pop (TV_VAR_EXPAND
);
5924 /* Honor stack protection warnings. */
5925 if (warn_stack_protect
)
5927 if (fun
->calls_alloca
)
5928 warning (OPT_Wstack_protector
,
5929 "stack protector not protecting local variables: "
5930 "variable length buffer");
5931 if (has_short_buffer
&& !crtl
->stack_protect_guard
)
5932 warning (OPT_Wstack_protector
,
5933 "stack protector not protecting function: "
5934 "all local arrays are less than %d bytes long",
5935 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE
));
5938 /* Set up parameters and prepare for return, for the function. */
5939 expand_function_start (current_function_decl
);
5941 /* If we emitted any instructions for setting up the variables,
5942 emit them before the FUNCTION_START note. */
5945 emit_insn_before (var_seq
, parm_birth_insn
);
      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just inserted an alloca call.
	 Adjust the pointer to match.  */
5950 parm_birth_insn
= var_seq
;
5953 /* Now that we also have the parameter RTXs, copy them over to our
5955 for (i
= 0; i
< SA
.map
->num_partitions
; i
++)
5957 tree var
= SSA_NAME_VAR (partition_to_var (SA
.map
, i
));
5959 if (TREE_CODE (var
) != VAR_DECL
5960 && !SA
.partition_to_pseudo
[i
])
5961 SA
.partition_to_pseudo
[i
] = DECL_RTL_IF_SET (var
);
5962 gcc_assert (SA
.partition_to_pseudo
[i
]);
5964 /* If this decl was marked as living in multiple places, reset
5965 this now to NULL. */
5966 if (DECL_RTL_IF_SET (var
) == pc_rtx
)
5967 SET_DECL_RTL (var
, NULL
);
5969 /* Some RTL parts really want to look at DECL_RTL(x) when x
5970 was a decl marked in REG_ATTR or MEM_ATTR. We could use
5971 SET_DECL_RTL here making this available, but that would mean
5972 to select one of the potentially many RTLs for one DECL. Instead
5973 of doing that we simply reset the MEM_EXPR of the RTL in question,
5974 then nobody can get at it and hence nobody can call DECL_RTL on it. */
5975 if (!DECL_RTL_SET_P (var
))
5977 if (MEM_P (SA
.partition_to_pseudo
[i
]))
5978 set_mem_expr (SA
.partition_to_pseudo
[i
], NULL
);
5982 /* If we have a class containing differently aligned pointers
5983 we need to merge those into the corresponding RTL pointer
5985 for (i
= 1; i
< num_ssa_names
; i
++)
5987 tree name
= ssa_name (i
);
5992 /* We might have generated new SSA names in
5993 update_alias_info_with_stack_vars. They will have a NULL
5994 defining statements, and won't be part of the partitioning,
5996 || !SSA_NAME_DEF_STMT (name
))
5998 part
= var_to_partition (SA
.map
, name
);
5999 if (part
== NO_PARTITION
)
6002 /* Adjust all partition members to get the underlying decl of
6003 the representative which we might have created in expand_one_var. */
6004 if (SSA_NAME_VAR (name
) == NULL_TREE
)
6006 tree leader
= partition_to_var (SA
.map
, part
);
6007 gcc_assert (SSA_NAME_VAR (leader
) != NULL_TREE
);
6008 replace_ssa_name_symbol (name
, SSA_NAME_VAR (leader
));
6010 if (!POINTER_TYPE_P (TREE_TYPE (name
)))
6013 r
= SA
.partition_to_pseudo
[part
];
6015 mark_reg_pointer (r
, get_pointer_alignment (name
));
6018 /* If this function is `main', emit a call to `__main'
6019 to run global initializers, etc. */
6020 if (DECL_NAME (current_function_decl
)
6021 && MAIN_NAME_P (DECL_NAME (current_function_decl
))
6022 && DECL_FILE_SCOPE_P (current_function_decl
))
6023 expand_main_function ();
6025 /* Initialize the stack_protect_guard field. This must happen after the
6026 call to __main (if any) so that the external decl is initialized. */
6027 if (crtl
->stack_protect_guard
)
6028 stack_protect_prologue ();
6030 expand_phi_nodes (&SA
);
6032 /* Register rtl specific functions for cfg. */
6033 rtl_register_cfg_hooks ();
6035 init_block
= construct_init_block ();
6037 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
6038 remaining edges later. */
6039 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (fun
)->succs
)
6040 e
->flags
&= ~EDGE_EXECUTABLE
;
6042 lab_rtx_for_bb
= new hash_map
<basic_block
, rtx_code_label
*>;
6043 FOR_BB_BETWEEN (bb
, init_block
->next_bb
, EXIT_BLOCK_PTR_FOR_FN (fun
),
6045 bb
= expand_gimple_basic_block (bb
, var_ret_seq
!= NULL_RTX
);
6047 if (MAY_HAVE_DEBUG_INSNS
)
6048 expand_debug_locations ();
6050 if (deep_ter_debug_map
)
6052 delete deep_ter_debug_map
;
6053 deep_ter_debug_map
= NULL
;
6056 /* Free stuff we no longer need after GIMPLE optimizations. */
6057 free_dominance_info (CDI_DOMINATORS
);
6058 free_dominance_info (CDI_POST_DOMINATORS
);
6059 delete_tree_cfg_annotations ();
6061 timevar_push (TV_OUT_OF_SSA
);
6062 finish_out_of_ssa (&SA
);
6063 timevar_pop (TV_OUT_OF_SSA
);
6065 timevar_push (TV_POST_EXPAND
);
6066 /* We are no longer in SSA form. */
6067 fun
->gimple_df
->in_ssa_p
= false;
6068 loops_state_clear (LOOP_CLOSED_SSA
);
6070 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
6071 conservatively to true until they are all profile aware. */
6072 delete lab_rtx_for_bb
;
6075 construct_exit_block ();
6076 insn_locations_finalize ();
6080 rtx_insn
*after
= return_label
;
6081 rtx_insn
*next
= NEXT_INSN (after
);
6082 if (next
&& NOTE_INSN_BASIC_BLOCK_P (next
))
6084 emit_insn_after (var_ret_seq
, after
);
6087 /* Zap the tree EH table. */
6088 set_eh_throw_stmt_table (fun
, NULL
);
6090 /* We need JUMP_LABEL be set in order to redirect jumps, and hence
6091 split edges which edge insertions might do. */
6092 rebuild_jump_labels (get_insns ());
6094 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR_FOR_FN (fun
),
6095 EXIT_BLOCK_PTR_FOR_FN (fun
), next_bb
)
6099 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
6103 rebuild_jump_labels_chain (e
->insns
.r
);
6104 /* Put insns after parm birth, but before
6105 NOTE_INSNS_FUNCTION_BEG. */
6106 if (e
->src
== ENTRY_BLOCK_PTR_FOR_FN (fun
)
6107 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun
)))
6109 rtx_insn
*insns
= e
->insns
.r
;
6111 if (NOTE_P (parm_birth_insn
)
6112 && NOTE_KIND (parm_birth_insn
) == NOTE_INSN_FUNCTION_BEG
)
6113 emit_insn_before_noloc (insns
, parm_birth_insn
, e
->dest
);
6115 emit_insn_after_noloc (insns
, parm_birth_insn
, e
->dest
);
6118 commit_one_edge_insertion (e
);
6125 /* We're done expanding trees to RTL. */
6126 currently_expanding_to_rtl
= 0;
6128 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR_FOR_FN (fun
)->next_bb
,
6129 EXIT_BLOCK_PTR_FOR_FN (fun
), next_bb
)
6133 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
6135 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6136 e
->flags
&= ~EDGE_EXECUTABLE
;
6138 /* At the moment not all abnormal edges match the RTL
6139 representation. It is safe to remove them here as
6140 find_many_sub_basic_blocks will rediscover them.
6141 In the future we should get this fixed properly. */
6142 if ((e
->flags
& EDGE_ABNORMAL
)
6143 && !(e
->flags
& EDGE_SIBCALL
))
6150 blocks
= sbitmap_alloc (last_basic_block_for_fn (fun
));
6151 bitmap_ones (blocks
);
6152 find_many_sub_basic_blocks (blocks
);
6153 sbitmap_free (blocks
);
6154 purge_all_dead_edges ();
6156 expand_stack_alignment ();
6158 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6160 if (crtl
->tail_call_emit
)
6161 fixup_tail_calls ();
6163 /* After initial rtl generation, call back to finish generating
6164 exception support code. We need to do this before cleaning up
6165 the CFG as the code does not expect dead landing pads. */
6166 if (fun
->eh
->region_tree
!= NULL
)
6167 finish_eh_generation ();
6169 /* Remove unreachable blocks, otherwise we cannot compute dominators
6170 which are needed for loop state verification. As a side-effect
6171 this also compacts blocks.
6172 ??? We cannot remove trivially dead insns here as for example
6173 the DRAP reg on i?86 is not magically live at this point.
6174 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6175 cleanup_cfg (CLEANUP_NO_INSN_DEL
);
6177 #ifdef ENABLE_CHECKING
6178 verify_flow_info ();
6181 /* Initialize pseudos allocated for hard registers. */
6182 emit_initial_value_sets ();
6184 /* And finally unshare all RTL. */
6187 /* There's no need to defer outputting this function any more; we
6188 know we want to output it. */
6189 DECL_DEFER_OUTPUT (current_function_decl
) = 0;
6191 /* Now that we're done expanding trees to RTL, we shouldn't have any
6192 more CONCATs anywhere. */
6193 generating_concat_p
= 0;
6198 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6199 /* And the pass manager will dump RTL for us. */
6202 /* If we're emitting a nested function, make sure its parent gets
6203 emitted as well. Doing otherwise confuses debug info. */
6206 for (parent
= DECL_CONTEXT (current_function_decl
);
6207 parent
!= NULL_TREE
;
6208 parent
= get_containing_scope (parent
))
6209 if (TREE_CODE (parent
) == FUNCTION_DECL
)
6210 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent
)) = 1;
6213 /* We are now committed to emitting code for this function. Do any
6214 preparation, such as emitting abstract debug info for the inline
6215 before it gets mangled by optimization. */
6216 if (cgraph_function_possibly_inlined_p (current_function_decl
))
6217 (*debug_hooks
->outlining_inline_function
) (current_function_decl
);
6219 TREE_ASM_WRITTEN (current_function_decl
) = 1;
6221 /* After expanding, the return labels are no longer needed. */
6222 return_label
= NULL
;
6223 naked_return_label
= NULL
;
6225 /* After expanding, the tm_restart map is no longer needed. */
6226 if (fun
->gimple_df
->tm_restart
)
6227 fun
->gimple_df
->tm_restart
= NULL
;
6229 /* Tag the blocks with a depth number so that change_scope can find
6230 the common parent easily. */
6231 set_block_levels (DECL_INITIAL (fun
->decl
), 0);
6232 default_rtl_profile ();
6234 timevar_pop (TV_POST_EXPAND
);
6242 make_pass_expand (gcc::context
*ctxt
)
6244 return new pass_expand (ctxt
);