1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
29 #include "tree-pass.h"
34 #include "regs.h" /* For reg_renumber. */
38 #include "diagnostic.h"
39 #include "fold-const.h"
41 #include "stor-layout.h"
43 #include "print-tree.h"
47 #include "cfgcleanup.h"
52 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
61 #include "gimple-pretty-print.h"
65 #include "tree-inline.h"
66 #include "value-prof.h"
67 #include "tree-ssa-live.h"
68 #include "tree-outof-ssa.h"
70 #include "insn-attr.h" /* For INSN_SCHEDULING. */
71 #include "stringpool.h"
74 #include "tree-ssa-address.h"
77 #include "tree-chkp.h"
80 /* Some systems use __main in a way incompatible with its use in gcc, in these
81 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
82 give the same symbol without quotes for an alternative entry point. You
83 must define both, or neither. */
85 #define NAME__MAIN "__main"
88 /* This variable holds information helping the rewriting of SSA trees
92 /* This variable holds the currently expanded gimple statement for purposes
93 of communicating the profile info to the builtin expanders.  */
94 gimple *currently_expanding_gimple_stmt;
96 static rtx expand_debug_expr (tree);
98 static bool defer_stack_allocation (tree, bool);
100 static void record_alignment_for_reg_var (unsigned int);
102 /* Return an expression tree corresponding to the RHS of GIMPLE
106 gimple_assign_rhs_to_tree (gimple *stmt)
109   enum gimple_rhs_class grhs_class;
111   grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
113   if (grhs_class == GIMPLE_TERNARY_RHS)
114     t = build3 (gimple_assign_rhs_code (stmt),
115                 TREE_TYPE (gimple_assign_lhs (stmt)),
116                 gimple_assign_rhs1 (stmt),
117                 gimple_assign_rhs2 (stmt),
118                 gimple_assign_rhs3 (stmt));
119   else if (grhs_class == GIMPLE_BINARY_RHS)
120     t = build2 (gimple_assign_rhs_code (stmt),
121                 TREE_TYPE (gimple_assign_lhs (stmt)),
122                 gimple_assign_rhs1 (stmt),
123                 gimple_assign_rhs2 (stmt));
124   else if (grhs_class == GIMPLE_UNARY_RHS)
125     t = build1 (gimple_assign_rhs_code (stmt),
126                 TREE_TYPE (gimple_assign_lhs (stmt)),
127                 gimple_assign_rhs1 (stmt));
128   else if (grhs_class == GIMPLE_SINGLE_RHS)
130       t = gimple_assign_rhs1 (stmt);
131       /* Avoid modifying this tree in place below.  */
132       if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
133            && gimple_location (stmt) != EXPR_LOCATION (t))
134           || (gimple_block (stmt)
135               && currently_expanding_to_rtl
142   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
143     SET_EXPR_LOCATION (t, gimple_location (stmt));
149 #ifndef STACK_ALIGNMENT_NEEDED
150 #define STACK_ALIGNMENT_NEEDED 1
153 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
155 /* Choose either CUR or NEXT as the leader DECL for a partition.
156 Prefer ignored decls, to simplify debug dumps and reduce the ambiguity
157 that arises from the same user variable being in multiple partitions
158 (this is less likely for compiler-introduced temps).  */
161 leader_merge (tree cur
, tree next
)
163 if (cur
== NULL
|| cur
== next
)
166 if (DECL_P (cur
) && DECL_IGNORED_P (cur
))
169 if (DECL_P (next
) && DECL_IGNORED_P (next
))
175 /* Associate declaration T with storage space X.  If T is not an
176 SSA name this is exactly SET_DECL_RTL, otherwise make the
177 partition of T associated with X.  */
179 set_rtl (tree t
, rtx x
)
181 gcc_checking_assert (!x
182 || !(TREE_CODE (t
) == SSA_NAME
|| is_gimple_reg (t
))
183 || (use_register_for_decl (t
)
185 || (GET_CODE (x
) == CONCAT
186 && (REG_P (XEXP (x
, 0))
187 || SUBREG_P (XEXP (x
, 0)))
188 && (REG_P (XEXP (x
, 1))
189 || SUBREG_P (XEXP (x
, 1))))
190 /* We need to accept PARALLELs for RESULT_DECLs
191 because of vector types with BLKmode returned
192 in multiple registers, but they are supposed
193 to be uncoalesced. */
194 || (GET_CODE (x
) == PARALLEL
196 && TREE_CODE (SSAVAR (t
)) == RESULT_DECL
197 && (GET_MODE (x
) == BLKmode
198 || !flag_tree_coalesce_vars
)))
199 : (MEM_P (x
) || x
== pc_rtx
200 || (GET_CODE (x
) == CONCAT
201 && MEM_P (XEXP (x
, 0))
202 && MEM_P (XEXP (x
, 1))))));
203 /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
204 RESULT_DECLs has the expected mode. For memory, we accept
205 unpromoted modes, since that's what we're likely to get. For
206 PARM_DECLs and RESULT_DECLs, we'll have been called by
207 set_parm_rtl, which will give us the default def, so we don't
208 have to compute it ourselves. For RESULT_DECLs, we accept mode
209 mismatches too, as long as we have BLKmode or are not coalescing
210 across variables, so that we don't reject BLKmode PARALLELs or
212 gcc_checking_assert (!x
|| x
== pc_rtx
|| TREE_CODE (t
) != SSA_NAME
214 && TREE_CODE (SSAVAR (t
)) == RESULT_DECL
215 && (promote_ssa_mode (t
, NULL
) == BLKmode
216 || !flag_tree_coalesce_vars
))
217 || !use_register_for_decl (t
)
218 || GET_MODE (x
) == promote_ssa_mode (t
, NULL
));
223 tree cur
= NULL_TREE
;
231 else if (SUBREG_P (xm
))
233 gcc_assert (subreg_lowpart_p (xm
));
234 xm
= SUBREG_REG (xm
);
237 else if (GET_CODE (xm
) == CONCAT
)
242 else if (GET_CODE (xm
) == PARALLEL
)
244 xm
= XVECEXP (xm
, 0, 0);
245 gcc_assert (GET_CODE (xm
) == EXPR_LIST
);
249 else if (xm
== pc_rtx
)
254 tree next
= skip
? cur
: leader_merge (cur
, SSAVAR (t
) ? SSAVAR (t
) : t
);
259 set_mem_attributes (x
,
260 next
&& TREE_CODE (next
) == SSA_NAME
264 set_reg_attrs_for_decl_rtl (next
, x
);
268 if (TREE_CODE (t
) == SSA_NAME
)
270 int part
= var_to_partition (SA
.map
, t
);
271 if (part
!= NO_PARTITION
)
273 if (SA
.partition_to_pseudo
[part
])
274 gcc_assert (SA
.partition_to_pseudo
[part
] == x
);
275 else if (x
!= pc_rtx
)
276 SA
.partition_to_pseudo
[part
] = x
;
278 /* For the benefit of debug information at -O0 (where
279 vartracking doesn't run) record the place also in the base
280 DECL. For PARMs and RESULTs, do so only when setting the
282 if (x
&& x
!= pc_rtx
&& SSA_NAME_VAR (t
)
283 && (VAR_P (SSA_NAME_VAR (t
))
284 || SSA_NAME_IS_DEFAULT_DEF (t
)))
286 tree var
= SSA_NAME_VAR (t
);
287 /* If we don't yet have something recorded, just record it now. */
288 if (!DECL_RTL_SET_P (var
))
289 SET_DECL_RTL (var
, x
);
290 /* If we have it set already to "multiple places" don't
292 else if (DECL_RTL (var
) == pc_rtx
)
294 /* If we have something recorded and it's not the same place
295 as we want to record now, we have multiple partitions for the
296 same base variable, with different places. We can't just
297 randomly choose one, hence we have to say that we don't know.
298 This only happens with optimization, and there var-tracking
299 will figure out the right thing. */
300 else if (DECL_RTL (var
) != x
)
301 SET_DECL_RTL (var
, pc_rtx
);
308 /* This structure holds data relevant to one variable that will be
309 placed in a stack slot. */
315 /* Initially, the size of the variable. Later, the size of the partition,
316 if this variable becomes its partition's representative.  */
319 /* The *byte* alignment required for this variable.  Or, as with the
320 size, the alignment for this partition.  */
323 /* The partition representative. */
324 size_t representative
;
326 /* The next stack variable in the partition, or EOC. */
329 /* The numbers of conflicting stack variables. */
333 #define EOC ((size_t)-1)
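/* Partitions are threaded through the stack_vars array as singly linked
   lists: a member's `next' field holds the index of the following member,
   the list is terminated by EOC, and every member's `representative' field
   names the index of the partition's leader.  */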
335 /* We have an array of such objects while deciding allocation. */
336 static struct stack_var
*stack_vars
;
337 static size_t stack_vars_alloc
;
338 static size_t stack_vars_num
;
339 static hash_map
<tree
, size_t> *decl_to_stack_part
;
341 /* Conflict bitmaps go on this obstack. This allows us to destroy
342 all of them in one big sweep. */
343 static bitmap_obstack stack_var_bitmap_obstack
;
345 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
346 is non-decreasing. */
347 static size_t *stack_vars_sorted
;
349 /* The phase of the stack frame. This is the known misalignment of
350 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
351 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
352 static int frame_phase
;
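/* For example, if PREFERRED_STACK_BOUNDARY is 128 bits (16 bytes) and
   frame_phase works out to 8, then frame offsets of -8, 8, 24, ... are
   exactly the ones that land on a 16-byte boundary.  */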
354 /* Used during expand_used_vars to remember if we saw any decls for
355 which we'd like to enable stack smashing protection. */
356 static bool has_protected_decls
;
358 /* Used during expand_used_vars.  Remember if we saw a character buffer
359 smaller than our cutoff threshold. Used for -Wstack-protector. */
360 static bool has_short_buffer
;
362 /* Compute the byte alignment to use for DECL.  Ignore the alignment
363 we can't honor given the expected alignment of the stack boundary.  */
366 align_local_variable (tree decl
)
370 if (TREE_CODE (decl
) == SSA_NAME
)
371 align
= TYPE_ALIGN (TREE_TYPE (decl
));
374 align
= LOCAL_DECL_ALIGNMENT (decl
);
375 SET_DECL_ALIGN (decl
, align
);
377 return align
/ BITS_PER_UNIT
;
380 /* Align given offset BASE with ALIGN.  Round up if ALIGN_UP is true,
381 round down otherwise.  Return the aligned BASE value.  */
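/* A worked example, assuming a non-negative BASE and power-of-two ALIGN:
     align_base (37, 16, true)  == (37 + 15) & -16 == 48
     align_base (37, 16, false) ==  37       & -16 == 32.  */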
383 static inline unsigned HOST_WIDE_INT
384 align_base (HOST_WIDE_INT base
, unsigned HOST_WIDE_INT align
, bool align_up
)
386 return align_up
? (base
+ align
- 1) & -align
: base
& -align
;
389 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
390 Return the frame offset. */
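/* For example, on a FRAME_GROWS_DOWNWARD target with frame_phase == 0,
   frame_offset == -20, SIZE == 8 and ALIGN == 8, the new frame_offset
   becomes -32 and -32 is returned as the offset of the new slot.  */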
393 alloc_stack_frame_space (HOST_WIDE_INT size
, unsigned HOST_WIDE_INT align
)
395 HOST_WIDE_INT offset
, new_frame_offset
;
397 if (FRAME_GROWS_DOWNWARD
)
400 = align_base (frame_offset
- frame_phase
- size
,
401 align
, false) + frame_phase
;
402 offset
= new_frame_offset
;
407 = align_base (frame_offset
- frame_phase
, align
, true) + frame_phase
;
408 offset
= new_frame_offset
;
409 new_frame_offset
+= size
;
411 frame_offset
= new_frame_offset
;
413 if (frame_offset_overflow (frame_offset
, cfun
->decl
))
414 frame_offset
= offset
= 0;
419 /* Accumulate DECL into STACK_VARS. */
422 add_stack_var (tree decl
)
426 if (stack_vars_num
>= stack_vars_alloc
)
428 if (stack_vars_alloc
)
429 stack_vars_alloc
= stack_vars_alloc
* 3 / 2;
431 stack_vars_alloc
= 32;
433 = XRESIZEVEC (struct stack_var
, stack_vars
, stack_vars_alloc
);
435 if (!decl_to_stack_part
)
436 decl_to_stack_part
= new hash_map
<tree
, size_t>;
438 v
= &stack_vars
[stack_vars_num
];
439 decl_to_stack_part
->put (decl
, stack_vars_num
);
442 tree size
= TREE_CODE (decl
) == SSA_NAME
443 ? TYPE_SIZE_UNIT (TREE_TYPE (decl
))
444 : DECL_SIZE_UNIT (decl
);
445 v
->size
= tree_to_uhwi (size
);
446 /* Ensure that all variables have size, so that &a != &b for any two
447 variables that are simultaneously live. */
450 v
->alignb
= align_local_variable (decl
);
451 /* An alignment of zero can mightily confuse us later. */
452 gcc_assert (v
->alignb
!= 0);
454 /* All variables are initially in their own partition. */
455 v
->representative
= stack_vars_num
;
458 /* All variables initially conflict with no other. */
461 /* Ensure that this decl doesn't get put onto the list twice. */
462 set_rtl (decl
, pc_rtx
);
467 /* Make the decls associated with luids X and Y conflict.  */
470 add_stack_var_conflict (size_t x
, size_t y
)
472 struct stack_var
*a
= &stack_vars
[x
];
473 struct stack_var
*b
= &stack_vars
[y
];
475 a
->conflicts
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
477 b
->conflicts
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
478 bitmap_set_bit (a
->conflicts
, y
);
479 bitmap_set_bit (b
->conflicts
, x
);
482 /* Check whether the decls associated with luids X and Y conflict.  */
485 stack_var_conflict_p (size_t x
, size_t y
)
487 struct stack_var
*a
= &stack_vars
[x
];
488 struct stack_var
*b
= &stack_vars
[y
];
491 /* Partitions containing an SSA name result from gimple registers
492 with things like unsupported modes. They are top-level and
493 hence conflict with everything else. */
494 if (TREE_CODE (a
->decl
) == SSA_NAME
|| TREE_CODE (b
->decl
) == SSA_NAME
)
497 if (!a
->conflicts
|| !b
->conflicts
)
499 return bitmap_bit_p (a
->conflicts
, y
);
502 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
503 enter its partition number into bitmap DATA. */
506 visit_op (gimple
*, tree op
, tree
, void *data
)
508 bitmap active
= (bitmap
)data
;
509 op
= get_base_address (op
);
512 && DECL_RTL_IF_SET (op
) == pc_rtx
)
514 size_t *v
= decl_to_stack_part
->get (op
);
516 bitmap_set_bit (active
, *v
);
521 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
522 record conflicts between it and all currently active other partitions
526 visit_conflict (gimple
*, tree op
, tree
, void *data
)
528 bitmap active
= (bitmap
)data
;
529 op
= get_base_address (op
);
532 && DECL_RTL_IF_SET (op
) == pc_rtx
)
534 size_t *v
= decl_to_stack_part
->get (op
);
535 if (v
&& bitmap_set_bit (active
, *v
))
540 gcc_assert (num
< stack_vars_num
);
541 EXECUTE_IF_SET_IN_BITMAP (active
, 0, i
, bi
)
542 add_stack_var_conflict (num
, i
);
548 /* Helper routine for add_scope_conflicts, calculating the active partitions
549 at the end of BB, leaving the result in WORK. We're called to generate
550 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
554 add_scope_conflicts_1 (basic_block bb
, bitmap work
, bool for_conflict
)
558 gimple_stmt_iterator gsi
;
559 walk_stmt_load_store_addr_fn visit
;
562 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
563 bitmap_ior_into (work
, (bitmap
)e
->src
->aux
);
567 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
569 gimple
*stmt
= gsi_stmt (gsi
);
570 walk_stmt_load_store_addr_ops (stmt
, work
, NULL
, NULL
, visit
);
572 for (gsi
= gsi_after_labels (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
574 gimple
*stmt
= gsi_stmt (gsi
);
576 if (gimple_clobber_p (stmt
))
578 tree lhs
= gimple_assign_lhs (stmt
);
580 /* Nested function lowering might introduce LHSs
581 that are COMPONENT_REFs. */
584 if (DECL_RTL_IF_SET (lhs
) == pc_rtx
585 && (v
= decl_to_stack_part
->get (lhs
)))
586 bitmap_clear_bit (work
, *v
);
588 else if (!is_gimple_debug (stmt
))
591 && visit
== visit_op
)
593 /* If this is the first real instruction in this BB we need
594 to add conflicts for everything live at this point now.
595 Unlike classical liveness for named objects we can't
596 rely on seeing a def/use of the names we're interested in.
597 There might merely be indirect loads/stores. We'd not add any
598 conflicts for such partitions. */
601 EXECUTE_IF_SET_IN_BITMAP (work
, 0, i
, bi
)
603 struct stack_var
*a
= &stack_vars
[i
];
605 a
->conflicts
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
606 bitmap_ior_into (a
->conflicts
, work
);
608 visit
= visit_conflict
;
610 walk_stmt_load_store_addr_ops (stmt
, work
, visit
, visit
, visit
);
615 /* Generate stack partition conflicts between all partitions that are
616 simultaneously live. */
619 add_scope_conflicts (void)
623 bitmap work
= BITMAP_ALLOC (NULL
);
627 /* We approximate the live range of a stack variable by taking the first
628 mention of its name as starting point(s), and by the end-of-scope
629 death clobber added by gimplify as ending point(s) of the range.
630 This over-approximates in the case where we, for instance, moved an
631 address-taken operation upward without also moving a dereference of it
632 upwards.  But it is conservatively correct, as a variable can never
633 hold values before its name is mentioned at least once.
635 We then do a mostly classical bitmap liveness algorithm. */
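/* Roughly, the code below does the following, iterated to a fixed point
   over the blocks in reverse post-order:
     work = union of the (bitmap) PRED->aux sets of BB's predecessors;
     walk BB's statements, setting a partition's bit at the first mention
     of its variable and clearing it again at the gimplify-added clobber;
     merge work into BB->aux and keep iterating if that changed anything.
   A final pass then replays each block to record the actual conflicts.  */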
637 FOR_ALL_BB_FN (bb
, cfun
)
638 bb
->aux
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
640 rpo
= XNEWVEC (int, last_basic_block_for_fn (cfun
));
641 n_bbs
= pre_and_rev_post_order_compute (NULL
, rpo
, false);
648 for (i
= 0; i
< n_bbs
; i
++)
651 bb
= BASIC_BLOCK_FOR_FN (cfun
, rpo
[i
]);
652 active
= (bitmap
)bb
->aux
;
653 add_scope_conflicts_1 (bb
, work
, false);
654 if (bitmap_ior_into (active
, work
))
659 FOR_EACH_BB_FN (bb
, cfun
)
660 add_scope_conflicts_1 (bb
, work
, true);
664 FOR_ALL_BB_FN (bb
, cfun
)
665 BITMAP_FREE (bb
->aux
);
668 /* A subroutine of partition_stack_vars. A comparison function for qsort,
669 sorting an array of indices by the properties of the object. */
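/* The resulting order is: "large" (over-aligned) variables first, then by
   decreasing size, then by decreasing alignment, and finally by ID so that
   the sort is stable (SSA name versions before DECL_UIDs).  */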
672 stack_var_cmp (const void *a
, const void *b
)
674 size_t ia
= *(const size_t *)a
;
675 size_t ib
= *(const size_t *)b
;
676 unsigned int aligna
= stack_vars
[ia
].alignb
;
677 unsigned int alignb
= stack_vars
[ib
].alignb
;
678 HOST_WIDE_INT sizea
= stack_vars
[ia
].size
;
679 HOST_WIDE_INT sizeb
= stack_vars
[ib
].size
;
680 tree decla
= stack_vars
[ia
].decl
;
681 tree declb
= stack_vars
[ib
].decl
;
683 unsigned int uida
, uidb
;
685 /* Primary compare on "large" alignment. Large comes first. */
686 largea
= (aligna
* BITS_PER_UNIT
> MAX_SUPPORTED_STACK_ALIGNMENT
);
687 largeb
= (alignb
* BITS_PER_UNIT
> MAX_SUPPORTED_STACK_ALIGNMENT
);
688 if (largea
!= largeb
)
689 return (int)largeb
- (int)largea
;
691 /* Secondary compare on size, decreasing */
697 /* Tertiary compare on true alignment, decreasing. */
703 /* Final compare on ID for sort stability, increasing.
704 Two SSA names are compared by their version, SSA names come before
705 non-SSA names, and two normal decls are compared by their DECL_UID. */
706 if (TREE_CODE (decla
) == SSA_NAME
)
708 if (TREE_CODE (declb
) == SSA_NAME
)
709 uida
= SSA_NAME_VERSION (decla
), uidb
= SSA_NAME_VERSION (declb
);
713 else if (TREE_CODE (declb
) == SSA_NAME
)
716 uida
= DECL_UID (decla
), uidb
= DECL_UID (declb
);
724 struct part_traits
: unbounded_int_hashmap_traits
<size_t, bitmap
> {};
725 typedef hash_map
<size_t, bitmap
, part_traits
> part_hashmap
;
727 /* If the points-to solution *PT points to variables that are in a partition
728 together with other variables add all partition members to the pointed-to
732 add_partitioned_vars_to_ptset (struct pt_solution
*pt
,
733 part_hashmap
*decls_to_partitions
,
734 hash_set
<bitmap
> *visited
, bitmap temp
)
742 /* The pointed-to vars bitmap is shared, it is enough to
744 || visited
->add (pt
->vars
))
749 /* By using a temporary bitmap to store all members of the partitions
750 we have to add we make sure to visit each of the partitions only
752 EXECUTE_IF_SET_IN_BITMAP (pt
->vars
, 0, i
, bi
)
754 || !bitmap_bit_p (temp
, i
))
755 && (part
= decls_to_partitions
->get (i
)))
756 bitmap_ior_into (temp
, *part
);
757 if (!bitmap_empty_p (temp
))
758 bitmap_ior_into (pt
->vars
, temp
);
761 /* Update points-to sets based on partition info, so we can use them on RTL.
762 The bitmaps representing stack partitions will be saved until expand,
763 where partitioned decls used as bases in memory expressions will be
767 update_alias_info_with_stack_vars (void)
769 part_hashmap
*decls_to_partitions
= NULL
;
771 tree var
= NULL_TREE
;
773 for (i
= 0; i
< stack_vars_num
; i
++)
777 struct ptr_info_def
*pi
;
779 /* Not interested in partitions with a single variable.  */
780 if (stack_vars
[i
].representative
!= i
781 || stack_vars
[i
].next
== EOC
)
784 if (!decls_to_partitions
)
786 decls_to_partitions
= new part_hashmap
;
787 cfun
->gimple_df
->decls_to_pointers
= new hash_map
<tree
, tree
>;
790 /* Create an SSA_NAME that points to the partition for use
791 as base during alias-oracle queries on RTL for bases that
792 have been partitioned. */
793 if (var
== NULL_TREE
)
794 var
= create_tmp_var (ptr_type_node
);
795 name
= make_ssa_name (var
);
797 /* Create bitmaps representing partitions. They will be used for
798 points-to sets later, so use GGC alloc. */
799 part
= BITMAP_GGC_ALLOC ();
800 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
802 tree decl
= stack_vars
[j
].decl
;
803 unsigned int uid
= DECL_PT_UID (decl
);
804 bitmap_set_bit (part
, uid
);
805 decls_to_partitions
->put (uid
, part
);
806 cfun
->gimple_df
->decls_to_pointers
->put (decl
, name
);
807 if (TREE_ADDRESSABLE (decl
))
808 TREE_ADDRESSABLE (name
) = 1;
811 /* Make the SSA name point to all partition members. */
812 pi
= get_ptr_info (name
);
813 pt_solution_set (&pi
->pt
, part
, false);
816 /* Make all points-to sets that contain one member of a partition
817 contain all members of the partition. */
818 if (decls_to_partitions
)
822 hash_set
<bitmap
> visited
;
823 bitmap temp
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
825 FOR_EACH_SSA_NAME (i
, name
, cfun
)
827 struct ptr_info_def
*pi
;
829 if (POINTER_TYPE_P (TREE_TYPE (name
))
830 && ((pi
= SSA_NAME_PTR_INFO (name
)) != NULL
))
831 add_partitioned_vars_to_ptset (&pi
->pt
, decls_to_partitions
,
835 add_partitioned_vars_to_ptset (&cfun
->gimple_df
->escaped
,
836 decls_to_partitions
, &visited
, temp
);
838 delete decls_to_partitions
;
843 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
844 partitioning algorithm. Partitions A and B are known to be non-conflicting.
845 Merge them into a single partition A. */
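/* For example, if A's partition is currently A -> X -> EOC and B is still a
   singleton, the result below is A -> B -> X -> EOC, with B's representative
   field now pointing at A.  */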
848 union_stack_vars (size_t a
, size_t b
)
850 struct stack_var
*vb
= &stack_vars
[b
];
854 gcc_assert (stack_vars
[b
].next
== EOC
);
855 /* Add B to A's partition. */
856 stack_vars
[b
].next
= stack_vars
[a
].next
;
857 stack_vars
[b
].representative
= a
;
858 stack_vars
[a
].next
= b
;
860 /* Update the required alignment of partition A to account for B. */
861 if (stack_vars
[a
].alignb
< stack_vars
[b
].alignb
)
862 stack_vars
[a
].alignb
= stack_vars
[b
].alignb
;
864 /* Update the interference graph and merge the conflicts. */
867 EXECUTE_IF_SET_IN_BITMAP (vb
->conflicts
, 0, u
, bi
)
868 add_stack_var_conflict (a
, stack_vars
[u
].representative
);
869 BITMAP_FREE (vb
->conflicts
);
873 /* A subroutine of expand_used_vars. Binpack the variables into
874 partitions constrained by the interference graph. The overall
875 algorithm used is as follows:
877 Sort the objects by size in descending order.
882 Look for the largest non-conflicting object B with size <= S.
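/* In the loop below this amounts to: for each partition representative I in
   sorted order, union into I every later representative J that does not
   conflict with I and has a compatible ("small" vs. "large") alignment
   class (and, when the address sanitizer is active, an acceptable size).  */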
889 partition_stack_vars (void)
891 size_t si
, sj
, n
= stack_vars_num
;
893 stack_vars_sorted
= XNEWVEC (size_t, stack_vars_num
);
894 for (si
= 0; si
< n
; ++si
)
895 stack_vars_sorted
[si
] = si
;
900 qsort (stack_vars_sorted
, n
, sizeof (size_t), stack_var_cmp
);
902 for (si
= 0; si
< n
; ++si
)
904 size_t i
= stack_vars_sorted
[si
];
905 unsigned int ialign
= stack_vars
[i
].alignb
;
906 HOST_WIDE_INT isize
= stack_vars
[i
].size
;
908 /* Ignore objects that aren't partition representatives. If we
909 see a var that is not a partition representative, it must
910 have been merged earlier. */
911 if (stack_vars
[i
].representative
!= i
)
914 for (sj
= si
+ 1; sj
< n
; ++sj
)
916 size_t j
= stack_vars_sorted
[sj
];
917 unsigned int jalign
= stack_vars
[j
].alignb
;
918 HOST_WIDE_INT jsize
= stack_vars
[j
].size
;
920 /* Ignore objects that aren't partition representatives. */
921 if (stack_vars
[j
].representative
!= j
)
924 /* Do not mix objects of "small" (supported) alignment
925 and "large" (unsupported) alignment. */
926 if ((ialign
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
927 != (jalign
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
))
930 /* For Address Sanitizer do not mix objects with different
931 sizes, as the shorter vars wouldn't be adequately protected.
932 Don't do that for "large" (unsupported) alignment objects,
933 those aren't protected anyway. */
934 if ((asan_sanitize_stack_p ())
936 && ialign
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
939 /* Ignore conflicting objects. */
940 if (stack_var_conflict_p (i
, j
))
943 /* UNION the objects, placing J at OFFSET. */
944 union_stack_vars (i
, j
);
948 update_alias_info_with_stack_vars ();
951 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
954 dump_stack_var_partition (void)
956 size_t si
, i
, j
, n
= stack_vars_num
;
958 for (si
= 0; si
< n
; ++si
)
960 i
= stack_vars_sorted
[si
];
962 /* Skip variables that aren't partition representatives, for now. */
963 if (stack_vars
[i
].representative
!= i
)
966 fprintf (dump_file
, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
967 " align %u\n", (unsigned long) i
, stack_vars
[i
].size
,
968 stack_vars
[i
].alignb
);
970 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
972 fputc ('\t', dump_file
);
973 print_generic_expr (dump_file
, stack_vars
[j
].decl
, dump_flags
);
975 fputc ('\n', dump_file
);
979 /* Assign rtl to DECL at BASE + OFFSET. */
982 expand_one_stack_var_at (tree decl
, rtx base
, unsigned base_align
,
983 HOST_WIDE_INT offset
)
988 /* If this fails, we've overflowed the stack frame. Error nicely? */
989 gcc_assert (offset
== trunc_int_for_mode (offset
, Pmode
));
991 x
= plus_constant (Pmode
, base
, offset
);
992 x
= gen_rtx_MEM (TREE_CODE (decl
) == SSA_NAME
993 ? TYPE_MODE (TREE_TYPE (decl
))
994 : DECL_MODE (SSAVAR (decl
)), x
);
996 if (TREE_CODE (decl
) != SSA_NAME
)
998 /* Set alignment we actually gave this decl if it isn't an SSA name.
999 If it is, we generate stack slots only accidentally, so it isn't as
1000 important; we'll simply use the alignment that is already set.  */
1001 if (base
== virtual_stack_vars_rtx
)
1002 offset
-= frame_phase
;
1003 align
= least_bit_hwi (offset
);
1004 align
*= BITS_PER_UNIT
;
1005 if (align
== 0 || align
> base_align
)
1008 /* One would think that we could assert that we're not decreasing
1009 alignment here, but (at least) the i386 port does exactly this
1010 via the MINIMUM_ALIGNMENT hook. */
1012 SET_DECL_ALIGN (decl
, align
);
1013 DECL_USER_ALIGN (decl
) = 0;
1019 struct stack_vars_data
1021 /* Vector of offset pairs, always end of some padding followed
1022 by start of the padding that needs Address Sanitizer protection.
1023 The vector is in reverse order: highest-offset pairs come first.  */
1024 auto_vec
<HOST_WIDE_INT
> asan_vec
;
1026 /* Vector of partition representative decls in between the paddings. */
1027 auto_vec
<tree
> asan_decl_vec
;
1029 /* Base pseudo register for Address Sanitizer protected automatic vars. */
1032 /* Alignment needed for the Address Sanitizer protected automatic vars. */
1033 unsigned int asan_alignb
;
1036 /* A subroutine of expand_used_vars. Give each partition representative
1037 a unique location within the stack frame. Update each partition member
1038 with that location. */
1041 expand_stack_vars (bool (*pred
) (size_t), struct stack_vars_data
*data
)
1043 size_t si
, i
, j
, n
= stack_vars_num
;
1044 HOST_WIDE_INT large_size
= 0, large_alloc
= 0;
1045 rtx large_base
= NULL
;
1046 unsigned large_align
= 0;
1047 bool large_allocation_done
= false;
1050 /* Determine if there are any variables requiring "large" alignment.
1051 Since these are dynamically allocated, we only process these if
1052 no predicate is involved.  */
1053 large_align
= stack_vars
[stack_vars_sorted
[0]].alignb
* BITS_PER_UNIT
;
1054 if (pred
== NULL
&& large_align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1056 /* Find the total size of these variables. */
1057 for (si
= 0; si
< n
; ++si
)
1061 i
= stack_vars_sorted
[si
];
1062 alignb
= stack_vars
[i
].alignb
;
1064 /* All "large" alignment decls come before all "small" alignment
1065 decls, but "large" alignment decls are not sorted based on
1066 their alignment. Increase large_align to track the largest
1067 required alignment. */
1068 if ((alignb
* BITS_PER_UNIT
) > large_align
)
1069 large_align
= alignb
* BITS_PER_UNIT
;
1071 /* Stop when we get to the first decl with "small" alignment. */
1072 if (alignb
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
1075 /* Skip variables that aren't partition representatives. */
1076 if (stack_vars
[i
].representative
!= i
)
1079 /* Skip variables that have already had rtl assigned. See also
1080 add_stack_var where we perpetrate this pc_rtx hack. */
1081 decl
= stack_vars
[i
].decl
;
1082 if (TREE_CODE (decl
) == SSA_NAME
1083 ? SA
.partition_to_pseudo
[var_to_partition (SA
.map
, decl
)] != NULL_RTX
1084 : DECL_RTL (decl
) != pc_rtx
)
1087 large_size
+= alignb
- 1;
1088 large_size
&= -(HOST_WIDE_INT
)alignb
;
1089 large_size
+= stack_vars
[i
].size
;
1093 for (si
= 0; si
< n
; ++si
)
1096 unsigned base_align
, alignb
;
1097 HOST_WIDE_INT offset
;
1099 i
= stack_vars_sorted
[si
];
1101 /* Skip variables that aren't partition representatives, for now. */
1102 if (stack_vars
[i
].representative
!= i
)
1105 /* Skip variables that have already had rtl assigned. See also
1106 add_stack_var where we perpetrate this pc_rtx hack. */
1107 decl
= stack_vars
[i
].decl
;
1108 if (TREE_CODE (decl
) == SSA_NAME
1109 ? SA
.partition_to_pseudo
[var_to_partition (SA
.map
, decl
)] != NULL_RTX
1110 : DECL_RTL (decl
) != pc_rtx
)
1113 /* Check the predicate to see whether this variable should be
1114 allocated in this pass. */
1115 if (pred
&& !pred (i
))
1118 alignb
= stack_vars
[i
].alignb
;
1119 if (alignb
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
1121 base
= virtual_stack_vars_rtx
;
1122 if ((asan_sanitize_stack_p ())
1125 HOST_WIDE_INT prev_offset
1126 = align_base (frame_offset
,
1127 MAX (alignb
, ASAN_RED_ZONE_SIZE
),
1128 !FRAME_GROWS_DOWNWARD
);
1129 tree repr_decl
= NULL_TREE
;
1131 = alloc_stack_frame_space (stack_vars
[i
].size
1132 + ASAN_RED_ZONE_SIZE
,
1133 MAX (alignb
, ASAN_RED_ZONE_SIZE
));
1135 data
->asan_vec
.safe_push (prev_offset
);
1136 data
->asan_vec
.safe_push (offset
+ stack_vars
[i
].size
);
1137 /* Find the best representative of the partition.
1138 Prefer those with DECL_NAME, and better yet those that also
1139 satisfy the asan_protect_stack_decl predicate.  */
1140 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1141 if (asan_protect_stack_decl (stack_vars
[j
].decl
)
1142 && DECL_NAME (stack_vars
[j
].decl
))
1144 repr_decl
= stack_vars
[j
].decl
;
1147 else if (repr_decl
== NULL_TREE
1148 && DECL_P (stack_vars
[j
].decl
)
1149 && DECL_NAME (stack_vars
[j
].decl
))
1150 repr_decl
= stack_vars
[j
].decl
;
1151 if (repr_decl
== NULL_TREE
)
1152 repr_decl
= stack_vars
[i
].decl
;
1153 data
->asan_decl_vec
.safe_push (repr_decl
);
1154 data
->asan_alignb
= MAX (data
->asan_alignb
, alignb
);
1155 if (data
->asan_base
== NULL
)
1156 data
->asan_base
= gen_reg_rtx (Pmode
);
1157 base
= data
->asan_base
;
1159 if (!STRICT_ALIGNMENT
)
1160 base_align
= crtl
->max_used_stack_slot_alignment
;
1162 base_align
= MAX (crtl
->max_used_stack_slot_alignment
,
1163 GET_MODE_ALIGNMENT (SImode
)
1164 << ASAN_SHADOW_SHIFT
);
1168 offset
= alloc_stack_frame_space (stack_vars
[i
].size
, alignb
);
1169 base_align
= crtl
->max_used_stack_slot_alignment
;
1174 /* Large alignment is only processed in the last pass. */
1178 /* If there were any variables requiring "large" alignment, allocate
1180 if (large_size
> 0 && ! large_allocation_done
)
1182 HOST_WIDE_INT loffset
;
1183 rtx large_allocsize
;
1185 large_allocsize
= GEN_INT (large_size
);
1186 get_dynamic_stack_size (&large_allocsize
, 0, large_align
, NULL
);
1187 loffset
= alloc_stack_frame_space
1188 (INTVAL (large_allocsize
),
1189 PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
);
1190 large_base
= get_dynamic_stack_base (loffset
, large_align
);
1191 large_allocation_done
= true;
1193 gcc_assert (large_base
!= NULL
);
1195 large_alloc
+= alignb
- 1;
1196 large_alloc
&= -(HOST_WIDE_INT
)alignb
;
1197 offset
= large_alloc
;
1198 large_alloc
+= stack_vars
[i
].size
;
1201 base_align
= large_align
;
1204 /* Create rtl for each variable based on their location within the
1206 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1208 expand_one_stack_var_at (stack_vars
[j
].decl
,
1214 gcc_assert (large_alloc
== large_size
);
1217 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1218 static HOST_WIDE_INT
1219 account_stack_vars (void)
1221 size_t si
, j
, i
, n
= stack_vars_num
;
1222 HOST_WIDE_INT size
= 0;
1224 for (si
= 0; si
< n
; ++si
)
1226 i
= stack_vars_sorted
[si
];
1228 /* Skip variables that aren't partition representatives, for now. */
1229 if (stack_vars
[i
].representative
!= i
)
1232 size
+= stack_vars
[i
].size
;
1233 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1234 set_rtl (stack_vars
[j
].decl
, NULL
);
1239 /* Record the RTL assignment X for the default def of PARM. */
1242 set_parm_rtl (tree parm
, rtx x
)
1244 gcc_assert (TREE_CODE (parm
) == PARM_DECL
1245 || TREE_CODE (parm
) == RESULT_DECL
);
1247 if (x
&& !MEM_P (x
))
1249 unsigned int align
= MINIMUM_ALIGNMENT (TREE_TYPE (parm
),
1250 TYPE_MODE (TREE_TYPE (parm
)),
1251 TYPE_ALIGN (TREE_TYPE (parm
)));
1253 /* If the variable alignment is very large we'll dynamically
1254 allocate it, which means that in-frame portion is just a
1255 pointer. ??? We've got a pseudo for sure here, do we
1256 actually dynamically allocate its spilling area if needed?
1257 ??? Isn't it a problem when POINTER_SIZE also exceeds
1258 MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32? */
1259 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1260 align
= POINTER_SIZE
;
1262 record_alignment_for_reg_var (align
);
1265 tree ssa
= ssa_default_def (cfun
, parm
);
1267 return set_rtl (parm
, x
);
1269 int part
= var_to_partition (SA
.map
, ssa
);
1270 gcc_assert (part
!= NO_PARTITION
);
1272 bool changed
= bitmap_bit_p (SA
.partitions_for_parm_default_defs
, part
);
1273 gcc_assert (changed
);
1276 gcc_assert (DECL_RTL (parm
) == x
);
1279 /* A subroutine of expand_one_var. Called to immediately assign rtl
1280 to a variable to be allocated in the stack frame. */
1283 expand_one_stack_var_1 (tree var
)
1285 HOST_WIDE_INT size
, offset
;
1286 unsigned byte_align
;
1288 if (TREE_CODE (var
) == SSA_NAME
)
1290 tree type
= TREE_TYPE (var
);
1291 size
= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
1292 byte_align
= TYPE_ALIGN_UNIT (type
);
1296 size
= tree_to_uhwi (DECL_SIZE_UNIT (var
));
1297 byte_align
= align_local_variable (var
);
1300 /* We handle highly aligned variables in expand_stack_vars. */
1301 gcc_assert (byte_align
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
);
1303 offset
= alloc_stack_frame_space (size
, byte_align
);
1305 expand_one_stack_var_at (var
, virtual_stack_vars_rtx
,
1306 crtl
->max_used_stack_slot_alignment
, offset
);
1309 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1310 already assigned some MEM. */
1313 expand_one_stack_var (tree var
)
1315 if (TREE_CODE (var
) == SSA_NAME
)
1317 int part
= var_to_partition (SA
.map
, var
);
1318 if (part
!= NO_PARTITION
)
1320 rtx x
= SA
.partition_to_pseudo
[part
];
1322 gcc_assert (MEM_P (x
));
1327 return expand_one_stack_var_1 (var
);
1330 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1331 that will reside in a hard register. */
1334 expand_one_hard_reg_var (tree var
)
1336 rest_of_decl_compilation (var
, 0, 0);
1339 /* Record the alignment requirements of some variable assigned to a
1343 record_alignment_for_reg_var (unsigned int align
)
1345 if (SUPPORTS_STACK_ALIGNMENT
1346 && crtl
->stack_alignment_estimated
< align
)
1348 /* stack_alignment_estimated shouldn't change after the stack
1349 realign decision has been made.  */
1350 gcc_assert (!crtl
->stack_realign_processed
);
1351 crtl
->stack_alignment_estimated
= align
;
1354 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1355 So here we only make sure stack_alignment_needed >= align. */
1356 if (crtl
->stack_alignment_needed
< align
)
1357 crtl
->stack_alignment_needed
= align
;
1358 if (crtl
->max_used_stack_slot_alignment
< align
)
1359 crtl
->max_used_stack_slot_alignment
= align
;
1362 /* Create RTL for an SSA partition. */
1365 expand_one_ssa_partition (tree var
)
1367 int part
= var_to_partition (SA
.map
, var
);
1368 gcc_assert (part
!= NO_PARTITION
);
1370 if (SA
.partition_to_pseudo
[part
])
1373 unsigned int align
= MINIMUM_ALIGNMENT (TREE_TYPE (var
),
1374 TYPE_MODE (TREE_TYPE (var
)),
1375 TYPE_ALIGN (TREE_TYPE (var
)));
1377 /* If the variable alignment is very large we'll dynamically allocate
1378 it, which means that in-frame portion is just a pointer. */
1379 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1380 align
= POINTER_SIZE
;
1382 record_alignment_for_reg_var (align
);
1384 if (!use_register_for_decl (var
))
1386 if (defer_stack_allocation (var
, true))
1387 add_stack_var (var
);
1389 expand_one_stack_var_1 (var
);
1393 machine_mode reg_mode
= promote_ssa_mode (var
, NULL
);
1395 rtx x
= gen_reg_rtx (reg_mode
);
1400 /* Record the association between the RTL generated for partition PART
1401 and the underlying variable of the SSA_NAME VAR. */
1404 adjust_one_expanded_partition_var (tree var
)
1409 tree decl
= SSA_NAME_VAR (var
);
1411 int part
= var_to_partition (SA
.map
, var
);
1412 if (part
== NO_PARTITION
)
1415 rtx x
= SA
.partition_to_pseudo
[part
];
1424 /* Note if the object is a user variable. */
1425 if (decl
&& !DECL_ARTIFICIAL (decl
))
1428 if (POINTER_TYPE_P (decl
? TREE_TYPE (decl
) : TREE_TYPE (var
)))
1429 mark_reg_pointer (x
, get_pointer_alignment (var
));
1432 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1433 that will reside in a pseudo register. */
1436 expand_one_register_var (tree var
)
1438 if (TREE_CODE (var
) == SSA_NAME
)
1440 int part
= var_to_partition (SA
.map
, var
);
1441 if (part
!= NO_PARTITION
)
1443 rtx x
= SA
.partition_to_pseudo
[part
];
1445 gcc_assert (REG_P (x
));
1452 tree type
= TREE_TYPE (decl
);
1453 machine_mode reg_mode
= promote_decl_mode (decl
, NULL
);
1454 rtx x
= gen_reg_rtx (reg_mode
);
1458 /* Note if the object is a user variable. */
1459 if (!DECL_ARTIFICIAL (decl
))
1462 if (POINTER_TYPE_P (type
))
1463 mark_reg_pointer (x
, get_pointer_alignment (var
));
1466 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1467 has some associated error, e.g. its type is error-mark. We just need
1468 to pick something that won't crash the rest of the compiler. */
1471 expand_one_error_var (tree var
)
1473 machine_mode mode
= DECL_MODE (var
);
1476 if (mode
== BLKmode
)
1477 x
= gen_rtx_MEM (BLKmode
, const0_rtx
);
1478 else if (mode
== VOIDmode
)
1481 x
= gen_reg_rtx (mode
);
1483 SET_DECL_RTL (var
, x
);
1486 /* A subroutine of expand_one_var. VAR is a variable that will be
1487 allocated to the local stack frame. Return true if we wish to
1488 add VAR to STACK_VARS so that it will be coalesced with other
1489 variables. Return false to allocate VAR immediately.
1491 This function is used to reduce the number of variables considered
1492 for coalescing, which reduces the size of the quadratic problem. */
1495 defer_stack_allocation (tree var
, bool toplevel
)
1497 tree size_unit
= TREE_CODE (var
) == SSA_NAME
1498 ? TYPE_SIZE_UNIT (TREE_TYPE (var
))
1499 : DECL_SIZE_UNIT (var
);
1501 /* Whether the variable is small enough for immediate allocation not to be
1502 a problem with regard to the frame size. */
1504 = ((HOST_WIDE_INT
) tree_to_uhwi (size_unit
)
1505 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING
));
1507 /* If stack protection is enabled, *all* stack variables must be deferred,
1508 so that we can re-order the strings to the top of the frame.
1509 Similarly for Address Sanitizer. */
1510 if (flag_stack_protect
|| asan_sanitize_stack_p ())
1513 unsigned int align
= TREE_CODE (var
) == SSA_NAME
1514 ? TYPE_ALIGN (TREE_TYPE (var
))
1517 /* We handle "large" alignment via dynamic allocation. We want to handle
1518 this extra complication in only one place, so defer them. */
1519 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1522 bool ignored
= TREE_CODE (var
) == SSA_NAME
1523 ? !SSAVAR (var
) || DECL_IGNORED_P (SSA_NAME_VAR (var
))
1524 : DECL_IGNORED_P (var
);
1526 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1527 might be detached from their block and appear at toplevel when we reach
1528 here. We want to coalesce them with variables from other blocks when
1529 the immediate contribution to the frame size would be noticeable. */
1530 if (toplevel
&& optimize
> 0 && ignored
&& !smallish
)
1533 /* Variables declared in the outermost scope automatically conflict
1534 with every other variable. The only reason to want to defer them
1535 at all is that, after sorting, we can more efficiently pack
1536 small variables in the stack frame. Continue to defer at -O2. */
1537 if (toplevel
&& optimize
< 2)
1540 /* Without optimization, *most* variables are allocated from the
1541 stack, which makes the quadratic problem large exactly when we
1542 want compilation to proceed as quickly as possible. On the
1543 other hand, we don't want the function's stack frame size to
1544 get completely out of hand. So we avoid adding scalars and
1545 "small" aggregates to the list at all. */
1546 if (optimize
== 0 && smallish
)
1552 /* A subroutine of expand_used_vars. Expand one variable according to
1553 its flavor. Variables to be placed on the stack are not actually
1554 expanded yet, merely recorded.
1555 When REALLY_EXPAND is false, only add stack values to be allocated.
1556 Return stack usage this variable is supposed to take.
1559 static HOST_WIDE_INT
1560 expand_one_var (tree var
, bool toplevel
, bool really_expand
)
1562 unsigned int align
= BITS_PER_UNIT
;
1567 if (TREE_TYPE (var
) != error_mark_node
&& VAR_P (var
))
1569 if (is_global_var (var
))
1572 /* Because we don't know if VAR will be in register or on stack,
1573 we conservatively assume it will be on stack even if VAR is
1574 eventually put into register after RA pass. For non-automatic
1575 variables, which won't be on stack, we collect alignment of
1576 type and ignore user specified alignment. Similarly for
1577 SSA_NAMEs for which use_register_for_decl returns true. */
1578 if (TREE_STATIC (var
)
1579 || DECL_EXTERNAL (var
)
1580 || (TREE_CODE (origvar
) == SSA_NAME
&& use_register_for_decl (var
)))
1581 align
= MINIMUM_ALIGNMENT (TREE_TYPE (var
),
1582 TYPE_MODE (TREE_TYPE (var
)),
1583 TYPE_ALIGN (TREE_TYPE (var
)));
1584 else if (DECL_HAS_VALUE_EXPR_P (var
)
1585 || (DECL_RTL_SET_P (var
) && MEM_P (DECL_RTL (var
))))
1586 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1587 or variables which were assigned a stack slot already by
1588 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1589 changed from the offset chosen to it. */
1590 align
= crtl
->stack_alignment_estimated
;
1592 align
= MINIMUM_ALIGNMENT (var
, DECL_MODE (var
), DECL_ALIGN (var
));
1594 /* If the variable alignment is very large we'll dynamically allocate
1595 it, which means that in-frame portion is just a pointer. */
1596 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1597 align
= POINTER_SIZE
;
1600 record_alignment_for_reg_var (align
);
1602 if (TREE_CODE (origvar
) == SSA_NAME
)
1604 gcc_assert (!VAR_P (var
)
1605 || (!DECL_EXTERNAL (var
)
1606 && !DECL_HAS_VALUE_EXPR_P (var
)
1607 && !TREE_STATIC (var
)
1608 && TREE_TYPE (var
) != error_mark_node
1609 && !DECL_HARD_REGISTER (var
)
1612 if (!VAR_P (var
) && TREE_CODE (origvar
) != SSA_NAME
)
1614 else if (DECL_EXTERNAL (var
))
1616 else if (DECL_HAS_VALUE_EXPR_P (var
))
1618 else if (TREE_STATIC (var
))
1620 else if (TREE_CODE (origvar
) != SSA_NAME
&& DECL_RTL_SET_P (var
))
1622 else if (TREE_TYPE (var
) == error_mark_node
)
1625 expand_one_error_var (var
);
1627 else if (VAR_P (var
) && DECL_HARD_REGISTER (var
))
1631 expand_one_hard_reg_var (var
);
1632 if (!DECL_HARD_REGISTER (var
))
1633 /* Invalid register specification. */
1634 expand_one_error_var (var
);
1637 else if (use_register_for_decl (var
))
1640 expand_one_register_var (origvar
);
1642 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var
)))
1644 /* Reject variables which cover more than half of the address-space. */
1647 error ("size of variable %q+D is too large", var
);
1648 expand_one_error_var (var
);
1651 else if (defer_stack_allocation (var
, toplevel
))
1652 add_stack_var (origvar
);
1657 if (lookup_attribute ("naked",
1658 DECL_ATTRIBUTES (current_function_decl
)))
1659 error ("cannot allocate stack for variable %q+D, naked function.",
1662 expand_one_stack_var (origvar
);
1666 return tree_to_uhwi (DECL_SIZE_UNIT (var
));
1671 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1672 expanding variables. Those variables that can be put into registers
1673 are allocated pseudos; those that can't are put on the stack.
1675 TOPLEVEL is true if this is the outermost BLOCK. */
1678 expand_used_vars_for_block (tree block
, bool toplevel
)
1682 /* Expand all variables at this level. */
1683 for (t
= BLOCK_VARS (block
); t
; t
= DECL_CHAIN (t
))
1685 && ((!VAR_P (t
) && TREE_CODE (t
) != RESULT_DECL
)
1686 || !DECL_NONSHAREABLE (t
)))
1687 expand_one_var (t
, toplevel
, true);
1689 /* Expand all variables at containing levels. */
1690 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
1691 expand_used_vars_for_block (t
, false);
1694 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1695 and clear TREE_USED on all local variables. */
1698 clear_tree_used (tree block
)
1702 for (t
= BLOCK_VARS (block
); t
; t
= DECL_CHAIN (t
))
1703 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1704 if ((!VAR_P (t
) && TREE_CODE (t
) != RESULT_DECL
)
1705 || !DECL_NONSHAREABLE (t
))
1708 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
1709 clear_tree_used (t
);
1713 SPCT_FLAG_DEFAULT
= 1,
1715 SPCT_FLAG_STRONG
= 3,
1716 SPCT_FLAG_EXPLICIT
= 4
1719 /* Examine TYPE and determine a bit mask of the following features. */
1721 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1722 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1723 #define SPCT_HAS_ARRAY 4
1724 #define SPCT_HAS_AGGREGATE 8
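/* For example, assuming the default --param ssp-buffer-size=8:
     char buf[64]            -> SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY
     char tiny[4]            -> SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
     struct { int x[4]; } s  -> SPCT_HAS_AGGREGATE | SPCT_HAS_ARRAY  */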
1727 stack_protect_classify_type (tree type
)
1729 unsigned int ret
= 0;
1732 switch (TREE_CODE (type
))
1735 t
= TYPE_MAIN_VARIANT (TREE_TYPE (type
));
1736 if (t
== char_type_node
1737 || t
== signed_char_type_node
1738 || t
== unsigned_char_type_node
)
1740 unsigned HOST_WIDE_INT max
= PARAM_VALUE (PARAM_SSP_BUFFER_SIZE
);
1741 unsigned HOST_WIDE_INT len
;
1743 if (!TYPE_SIZE_UNIT (type
)
1744 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type
)))
1747 len
= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
1750 ret
= SPCT_HAS_SMALL_CHAR_ARRAY
| SPCT_HAS_ARRAY
;
1752 ret
= SPCT_HAS_LARGE_CHAR_ARRAY
| SPCT_HAS_ARRAY
;
1755 ret
= SPCT_HAS_ARRAY
;
1759 case QUAL_UNION_TYPE
:
1761 ret
= SPCT_HAS_AGGREGATE
;
1762 for (t
= TYPE_FIELDS (type
); t
; t
= TREE_CHAIN (t
))
1763 if (TREE_CODE (t
) == FIELD_DECL
)
1764 ret
|= stack_protect_classify_type (TREE_TYPE (t
));
1774 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1775 part of the local stack frame. Remember if we ever return nonzero for
1776 any variable in this function. The return value is the phase number in
1777 which the variable should be allocated. */
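/* For example, with -fstack-protector-all or -fstack-protector-strong a bare
   character array gets phase 1, any other variable whose type contains an
   array gets phase 2, and everything else gets phase 0.  Phase 1 variables
   are laid out first by expand_used_vars, so they end up nearest the guard.  */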
1780 stack_protect_decl_phase (tree decl
)
1782 unsigned int bits
= stack_protect_classify_type (TREE_TYPE (decl
));
1785 if (bits
& SPCT_HAS_SMALL_CHAR_ARRAY
)
1786 has_short_buffer
= true;
1788 if (flag_stack_protect
== SPCT_FLAG_ALL
1789 || flag_stack_protect
== SPCT_FLAG_STRONG
1790 || (flag_stack_protect
== SPCT_FLAG_EXPLICIT
1791 && lookup_attribute ("stack_protect",
1792 DECL_ATTRIBUTES (current_function_decl
))))
1794 if ((bits
& (SPCT_HAS_SMALL_CHAR_ARRAY
| SPCT_HAS_LARGE_CHAR_ARRAY
))
1795 && !(bits
& SPCT_HAS_AGGREGATE
))
1797 else if (bits
& SPCT_HAS_ARRAY
)
1801 ret
= (bits
& SPCT_HAS_LARGE_CHAR_ARRAY
) != 0;
1804 has_protected_decls
= true;
1809 /* Two helper routines that check for phase 1 and phase 2. These are used
1810 as callbacks for expand_stack_vars. */
1813 stack_protect_decl_phase_1 (size_t i
)
1815 return stack_protect_decl_phase (stack_vars
[i
].decl
) == 1;
1819 stack_protect_decl_phase_2 (size_t i
)
1821 return stack_protect_decl_phase (stack_vars
[i
].decl
) == 2;
1824 /* And a helper function that checks for the asan phase (with the stack
1825 protector it is phase 3).  This is used as a callback for expand_stack_vars.
1826 Returns true if any of the vars in the partition need to be protected.  */
1829 asan_decl_phase_3 (size_t i
)
1833 if (asan_protect_stack_decl (stack_vars
[i
].decl
))
1835 i
= stack_vars
[i
].next
;
1840 /* Ensure that variables in different stack protection phases conflict
1841 so that they are not merged and share the same stack slot. */
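/* For example, a phase 1 character buffer and a phase 0 scalar are recorded
   as conflicting even if their lifetimes never overlap, so
   partition_stack_vars will never coalesce them into one stack slot.  */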
1844 add_stack_protection_conflicts (void)
1846 size_t i
, j
, n
= stack_vars_num
;
1847 unsigned char *phase
;
1849 phase
= XNEWVEC (unsigned char, n
);
1850 for (i
= 0; i
< n
; ++i
)
1851 phase
[i
] = stack_protect_decl_phase (stack_vars
[i
].decl
);
1853 for (i
= 0; i
< n
; ++i
)
1855 unsigned char ph_i
= phase
[i
];
1856 for (j
= i
+ 1; j
< n
; ++j
)
1857 if (ph_i
!= phase
[j
])
1858 add_stack_var_conflict (i
, j
);
1864 /* Create a decl for the guard at the top of the stack frame. */
1867 create_stack_guard (void)
1869 tree guard
= build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
1870 VAR_DECL
, NULL
, ptr_type_node
);
1871 TREE_THIS_VOLATILE (guard
) = 1;
1872 TREE_USED (guard
) = 1;
1873 expand_one_stack_var (guard
);
1874 crtl
->stack_protect_guard
= guard
;
1877 /* Prepare for expanding variables. */
1879 init_vars_expansion (void)
1881 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1882 bitmap_obstack_initialize (&stack_var_bitmap_obstack
);
1884 /* A map from decl to stack partition. */
1885 decl_to_stack_part
= new hash_map
<tree
, size_t>;
1887 /* Initialize local stack smashing state. */
1888 has_protected_decls
= false;
1889 has_short_buffer
= false;
1892 /* Free up stack variable graph data. */
1894 fini_vars_expansion (void)
1896 bitmap_obstack_release (&stack_var_bitmap_obstack
);
1898 XDELETEVEC (stack_vars
);
1899 if (stack_vars_sorted
)
1900 XDELETEVEC (stack_vars_sorted
);
1902 stack_vars_sorted
= NULL
;
1903 stack_vars_alloc
= stack_vars_num
= 0;
1904 delete decl_to_stack_part
;
1905 decl_to_stack_part
= NULL
;
1908 /* Make a fair guess for the size of the stack frame of the function
1909 in NODE. This doesn't have to be exact, the result is only used in
1910 the inline heuristics. So we don't want to run the full stack var
1911 packing algorithm (which is quadratic in the number of stack vars).
1912 Instead, we calculate the total size of all stack vars. This turns
1913 out to be a pretty fair estimate -- packing of stack vars doesn't
1914 happen very often. */
1917 estimated_stack_frame_size (struct cgraph_node
*node
)
1919 HOST_WIDE_INT size
= 0;
1922 struct function
*fn
= DECL_STRUCT_FUNCTION (node
->decl
);
1926 init_vars_expansion ();
1928 FOR_EACH_LOCAL_DECL (fn
, i
, var
)
1929 if (auto_var_in_fn_p (var
, fn
->decl
))
1930 size
+= expand_one_var (var
, true, false);
1932 if (stack_vars_num
> 0)
1934 /* Fake sorting the stack vars for account_stack_vars (). */
1935 stack_vars_sorted
= XNEWVEC (size_t, stack_vars_num
);
1936 for (i
= 0; i
< stack_vars_num
; ++i
)
1937 stack_vars_sorted
[i
] = i
;
1938 size
+= account_stack_vars ();
1941 fini_vars_expansion ();
1946 /* Helper routine to check if a record or union contains an array field. */
1949 record_or_union_type_has_array_p (const_tree tree_type
)
1951 tree fields
= TYPE_FIELDS (tree_type
);
1954 for (f
= fields
; f
; f
= DECL_CHAIN (f
))
1955 if (TREE_CODE (f
) == FIELD_DECL
)
1957 tree field_type
= TREE_TYPE (f
);
1958 if (RECORD_OR_UNION_TYPE_P (field_type
)
1959 && record_or_union_type_has_array_p (field_type
))
1961 if (TREE_CODE (field_type
) == ARRAY_TYPE
)
1967 /* Check if the current function has local referenced variables that
1968 have their addresses taken, contain an array, or are arrays. */
1971 stack_protect_decl_p ()
1976 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
1977 if (!is_global_var (var
))
1979 tree var_type
= TREE_TYPE (var
);
1981 && (TREE_CODE (var_type
) == ARRAY_TYPE
1982 || TREE_ADDRESSABLE (var
)
1983 || (RECORD_OR_UNION_TYPE_P (var_type
)
1984 && record_or_union_type_has_array_p (var_type
))))
1990 /* Check if the current function has calls that use a return slot. */
1993 stack_protect_return_slot_p ()
1997 FOR_ALL_BB_FN (bb
, cfun
)
1998 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
1999 !gsi_end_p (gsi
); gsi_next (&gsi
))
2001 gimple
*stmt
= gsi_stmt (gsi
);
2002 /* This assumes that calls to internal-only functions never
2003 use a return slot. */
2004 if (is_gimple_call (stmt
)
2005 && !gimple_call_internal_p (stmt
)
2006 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt
)),
2007 gimple_call_fndecl (stmt
)))
2013 /* Expand all variables used in the function. */
2016 expand_used_vars (void)
2018 tree var
, outer_block
= DECL_INITIAL (current_function_decl
);
2019 auto_vec
<tree
> maybe_local_decls
;
2020 rtx_insn
*var_end_seq
= NULL
;
2023 bool gen_stack_protect_signal
= false;
2025 /* Compute the phase of the stack frame for this function. */
2027 int align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
2028 int off
= targetm
.starting_frame_offset () % align
;
2029 frame_phase
= off
? align
- off
: 0;
2032 /* Set TREE_USED on all variables in the local_decls. */
2033 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
2034 TREE_USED (var
) = 1;
2035 /* Clear TREE_USED on all variables associated with a block scope. */
2036 clear_tree_used (DECL_INITIAL (current_function_decl
));
2038 init_vars_expansion ();
  if (targetm.use_pseudo_pic_reg ())
    pic_offset_table_rtx = gen_reg_rtx (Pmode);

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
	continue;

      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      expand_one_ssa_partition (var);
    }

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    gen_stack_protect_signal
      = stack_protect_decl_p () || stack_protect_return_slot_p ();

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
	{
	  TREE_USED (var) = 0;
	  goto next;
	}
      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	expand_now = true;

      /* Expand variables not associated with any block now.  Those created by
	 the optimizers could be live anywhere in the function.  Those that
	 could possibly have been scoped originally and detached from their
	 block will have their allocation deferred so we coalesce them with
	 others when optimization is enabled.  */
      else if (TREE_USED (var))
	expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
	expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
	{
	  rtx rtl = DECL_RTL_IF_SET (var);

	  /* Keep artificial non-ignored vars in cfun->local_decls
	     chain until instantiate_decls.  */
	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
	    add_local_decl (cfun, var);
	  else if (rtl == NULL_RTX)
	    /* If rtl isn't set yet, which can happen e.g. with
	       -fstack-protector, retry before returning from this
	       function.  */
	    maybe_local_decls.safe_push (var);
	}
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect != 0
	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
		  && lookup_attribute ("stack_protect",
				       DECL_ATTRIBUTES (current_function_decl)))))
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();
    }
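  /* Partitioning lets locals whose lifetimes do not conflict share one
     stack slot.  For example, two 64-byte buffers used only in disjoint
     scopes of the same function do not interfere, so they end up in one
     partition and take 64 bytes of frame instead of 128; the chosen
     offsets can be inspected via dump_stack_var_partition in the expand
     dump.  */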
  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
	  || cfun->calls_alloca || has_protected_decls
	  || lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls
	  || lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;

    case SPCT_FLAG_EXPLICIT:
      if (lookup_attribute ("stack_protect",
			    DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;

    default:
      break;
    }
  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      struct stack_vars_data data;

      data.asan_base = NULL_RTX;
      data.asan_alignb = 0;

      /* Reorder decls to be protected by iterating over the variables
	 array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
	 earlier, such that we naturally see these variables first,
	 and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
	{
	  /* Phase 1 contains only character arrays.  */
	  expand_stack_vars (stack_protect_decl_phase_1, &data);

	  /* Phase 2 contains other kinds of arrays.  */
	  if (flag_stack_protect == SPCT_FLAG_ALL
	      || flag_stack_protect == SPCT_FLAG_STRONG
	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
		  && lookup_attribute ("stack_protect",
				       DECL_ATTRIBUTES (current_function_decl))))
	    expand_stack_vars (stack_protect_decl_phase_2, &data);
	}

      if (asan_sanitize_stack_p ())
	/* Phase 3, any partitions that need asan protection
	   in addition to phase 1 and 2.  */
	expand_stack_vars (asan_decl_phase_3, &data);

      if (!data.asan_vec.is_empty ())
	{
	  HOST_WIDE_INT prev_offset = frame_offset;
	  HOST_WIDE_INT offset, sz, redzonesz;
	  redzonesz = ASAN_RED_ZONE_SIZE;
	  sz = data.asan_vec[0] - prev_offset;
	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
	      && data.asan_alignb <= 4096
	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
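	  /* For instance, with an ASAN_RED_ZONE_SIZE of 32 bytes (the usual
	     value), a 40-byte first partition and a 64-byte alignment
	     requirement: sz = 40 and 40 + 32 >= 64, so
	     redzonesz = ((40 + 32 + 63) & ~63) - 40 = 128 - 40 = 88,
	     which grows the red zone just enough that the following frame
	     offset stays 64-byte aligned.  */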
	  offset = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
	  data.asan_vec.safe_push (prev_offset);
	  data.asan_vec.safe_push (offset);
	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
	  if (STRICT_ALIGNMENT)
	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
				      << ASAN_SHADOW_SHIFT)
				     / BITS_PER_UNIT, 1);

	  var_end_seq
	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
					  data.asan_base,
					  data.asan_alignb,
					  data.asan_vec.address (),
					  data.asan_decl_vec.address (),
					  data.asan_vec.length ());
	}

      expand_stack_vars (NULL, &data);
    }

  if (asan_sanitize_allocas_p () && cfun->calls_alloca)
    var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
					      virtual_stack_vars_rtx,
					      var_end_seq);

  fini_vars_expansion ();

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
	 chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
	add_local_decl (cfun, var);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
	frame_offset += align - 1;
      frame_offset &= -align;
    }
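  /* For example, with a PREFERRED_STACK_BOUNDARY of 128 bits, align is 16
     bytes.  On a downward-growing frame an offset of -52 is masked down to
     -64; on an upward-growing frame adding align - 1 first turns 52 into 67,
     which the mask then rounds to 64.  Either way the final FRAME_OFFSET is
     a multiple of the preferred boundary.  */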
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
			 TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx_code_label *
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
  if (elt)
    return *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *lab_stmt;

      lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!lab_stmt)
	break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
	break;

      return jump_target_rtx (lab);
    }

  rtx_code_label *l = gen_label_rtx ();
  lab_rtx_for_bb->put (bb, l);
  return l;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx_insn *last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx_insn *insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
	 insert on the remaining edge we potentially will insert
	 it at the end of this block (if the dest block isn't feasible)
	 in order to avoid splitting the edge.  This insertion will take
	 place in front of the last jump.  But we might have emitted
	 multiple jumps (conditional and one unconditional) to the
	 same destination.  Inserting in front of the last one then
	 is a problem.  See PR 40021.  We fix this by deleting all
	 jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
	 confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
	{
	  insn = PREV_INSN (insn);
	  if (JUMP_P (NEXT_INSN (insn)))
	    {
	      if (!any_condjump_p (NEXT_INSN (insn)))
		{
		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
		}
	      delete_insn (NEXT_INSN (insn));
	    }
	}
    }
}
2391 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2392 Returns a new basic block if we've terminated the current basic
2393 block and created a new one. */
2396 expand_gimple_cond (basic_block bb
, gcond
*stmt
)
2398 basic_block new_bb
, dest
;
2401 rtx_insn
*last2
, *last
;
2402 enum tree_code code
;
2405 code
= gimple_cond_code (stmt
);
2406 op0
= gimple_cond_lhs (stmt
);
2407 op1
= gimple_cond_rhs (stmt
);
2408 /* We're sometimes presented with such code:
2412 This would expand to two comparisons which then later might
2413 be cleaned up by combine. But some pattern matchers like if-conversion
2414 work better when there's only one compare, so make up for this
2415 here as special exception if TER would have made the same change. */
2417 && TREE_CODE (op0
) == SSA_NAME
2418 && TREE_CODE (TREE_TYPE (op0
)) == BOOLEAN_TYPE
2419 && TREE_CODE (op1
) == INTEGER_CST
2420 && ((gimple_cond_code (stmt
) == NE_EXPR
2421 && integer_zerop (op1
))
2422 || (gimple_cond_code (stmt
) == EQ_EXPR
2423 && integer_onep (op1
)))
2424 && bitmap_bit_p (SA
.values
, SSA_NAME_VERSION (op0
)))
2426 gimple
*second
= SSA_NAME_DEF_STMT (op0
);
2427 if (gimple_code (second
) == GIMPLE_ASSIGN
)
2429 enum tree_code code2
= gimple_assign_rhs_code (second
);
2430 if (TREE_CODE_CLASS (code2
) == tcc_comparison
)
2433 op0
= gimple_assign_rhs1 (second
);
2434 op1
= gimple_assign_rhs2 (second
);
2436 /* If jumps are cheap and the target does not support conditional
2437 compare, turn some more codes into jumpy sequences. */
2438 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2439 && targetm
.gen_ccmp_first
== NULL
)
2441 if ((code2
== BIT_AND_EXPR
2442 && TYPE_PRECISION (TREE_TYPE (op0
)) == 1
2443 && TREE_CODE (gimple_assign_rhs2 (second
)) != INTEGER_CST
)
2444 || code2
== TRUTH_AND_EXPR
)
2446 code
= TRUTH_ANDIF_EXPR
;
2447 op0
= gimple_assign_rhs1 (second
);
2448 op1
= gimple_assign_rhs2 (second
);
2450 else if (code2
== BIT_IOR_EXPR
|| code2
== TRUTH_OR_EXPR
)
2452 code
= TRUTH_ORIF_EXPR
;
2453 op0
= gimple_assign_rhs1 (second
);
2454 op1
= gimple_assign_rhs2 (second
);
2460 last2
= last
= get_last_insn ();
2462 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2463 set_curr_insn_location (gimple_location (stmt
));
2465 /* These flags have no purpose in RTL land. */
2466 true_edge
->flags
&= ~EDGE_TRUE_VALUE
;
2467 false_edge
->flags
&= ~EDGE_FALSE_VALUE
;
2469 /* We can either have a pure conditional jump with one fallthru edge or
2470 two-way jump that needs to be decomposed into two basic blocks. */
2471 if (false_edge
->dest
== bb
->next_bb
)
2473 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2474 true_edge
->probability
);
2475 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2476 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2477 set_curr_insn_location (true_edge
->goto_locus
);
2478 false_edge
->flags
|= EDGE_FALLTHRU
;
2479 maybe_cleanup_end_of_block (false_edge
, last
);
2482 if (true_edge
->dest
== bb
->next_bb
)
2484 jumpifnot_1 (code
, op0
, op1
, label_rtx_for_bb (false_edge
->dest
),
2485 false_edge
->probability
);
2486 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2487 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2488 set_curr_insn_location (false_edge
->goto_locus
);
2489 true_edge
->flags
|= EDGE_FALLTHRU
;
2490 maybe_cleanup_end_of_block (true_edge
, last
);
2494 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2495 true_edge
->probability
);
2496 last
= get_last_insn ();
2497 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2498 set_curr_insn_location (false_edge
->goto_locus
);
2499 emit_jump (label_rtx_for_bb (false_edge
->dest
));
2502 if (BARRIER_P (BB_END (bb
)))
2503 BB_END (bb
) = PREV_INSN (BB_END (bb
));
2504 update_bb_for_insn (bb
);
2506 new_bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
2507 dest
= false_edge
->dest
;
2508 redirect_edge_succ (false_edge
, new_bb
);
2509 false_edge
->flags
|= EDGE_FALLTHRU
;
2510 new_bb
->count
= false_edge
->count ();
2511 new_bb
->frequency
= EDGE_FREQUENCY (false_edge
);
2512 loop_p loop
= find_common_loop (bb
->loop_father
, dest
->loop_father
);
2513 add_bb_to_loop (new_bb
, loop
);
2514 if (loop
->latch
== bb
2515 && loop
->header
== dest
)
2516 loop
->latch
= new_bb
;
2517 make_single_succ_edge (new_bb
, dest
, 0);
2518 if (BARRIER_P (BB_END (new_bb
)))
2519 BB_END (new_bb
) = PREV_INSN (BB_END (new_bb
));
2520 update_bb_for_insn (new_bb
);
2522 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
2524 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2526 set_curr_insn_location (true_edge
->goto_locus
);
2527 true_edge
->goto_locus
= curr_insn_location ();
/* Mark all calls that can have a transaction restart.  */

static void
mark_transaction_restart_calls (gimple *stmt)
{
  struct tm_restart_node dummy;
  tm_restart_node **slot;

  if (!cfun->gimple_df->tm_restart)
    return;

  dummy.stmt = stmt;
  slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
  if (slot)
    {
      struct tm_restart_node *n = *slot;
      tree list = n->label_or_list;
      rtx_insn *insn;

      for (insn = next_real_insn (get_last_insn ());
	   !CALL_P (insn);
	   insn = next_real_insn (insn))
	continue;

      if (TREE_CODE (list) == LABEL_DECL)
	add_reg_note (insn, REG_TM, label_rtx (list));
      else
	for (; list; list = TREE_CHAIN (list))
	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    }
}
2565 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2569 expand_call_stmt (gcall
*stmt
)
2571 tree exp
, decl
, lhs
;
2575 if (gimple_call_internal_p (stmt
))
2577 expand_internal_call (stmt
);
2581 /* If this is a call to a built-in function and it has no effect other
2582 than setting the lhs, try to implement it using an internal function
2584 decl
= gimple_call_fndecl (stmt
);
2585 if (gimple_call_lhs (stmt
)
2586 && !gimple_has_side_effects (stmt
)
2587 && (optimize
|| (decl
&& called_as_built_in (decl
))))
2589 internal_fn ifn
= replacement_internal_fn (stmt
);
2590 if (ifn
!= IFN_LAST
)
2592 expand_internal_call (ifn
, stmt
);
2597 exp
= build_vl_exp (CALL_EXPR
, gimple_call_num_args (stmt
) + 3);
2599 CALL_EXPR_FN (exp
) = gimple_call_fn (stmt
);
2600 builtin_p
= decl
&& DECL_BUILT_IN (decl
);
2602 /* If this is not a builtin function, the function type through which the
2603 call is made may be different from the type of the function. */
2606 = fold_convert (build_pointer_type (gimple_call_fntype (stmt
)),
2607 CALL_EXPR_FN (exp
));
2609 TREE_TYPE (exp
) = gimple_call_return_type (stmt
);
2610 CALL_EXPR_STATIC_CHAIN (exp
) = gimple_call_chain (stmt
);
2612 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
2614 tree arg
= gimple_call_arg (stmt
, i
);
2616 /* TER addresses into arguments of builtin functions so we have a
2617 chance to infer more correct alignment information. See PR39954. */
2619 && TREE_CODE (arg
) == SSA_NAME
2620 && (def
= get_gimple_for_ssa_name (arg
))
2621 && gimple_assign_rhs_code (def
) == ADDR_EXPR
)
2622 arg
= gimple_assign_rhs1 (def
);
2623 CALL_EXPR_ARG (exp
, i
) = arg
;
2626 if (gimple_has_side_effects (stmt
))
2627 TREE_SIDE_EFFECTS (exp
) = 1;
2629 if (gimple_call_nothrow_p (stmt
))
2630 TREE_NOTHROW (exp
) = 1;
2632 CALL_EXPR_TAILCALL (exp
) = gimple_call_tail_p (stmt
);
2633 CALL_EXPR_MUST_TAIL_CALL (exp
) = gimple_call_must_tail_p (stmt
);
2634 CALL_EXPR_RETURN_SLOT_OPT (exp
) = gimple_call_return_slot_opt_p (stmt
);
2636 && DECL_BUILT_IN_CLASS (decl
) == BUILT_IN_NORMAL
2637 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl
)))
2638 CALL_ALLOCA_FOR_VAR_P (exp
) = gimple_call_alloca_for_var_p (stmt
);
2640 CALL_FROM_THUNK_P (exp
) = gimple_call_from_thunk_p (stmt
);
2641 CALL_EXPR_VA_ARG_PACK (exp
) = gimple_call_va_arg_pack_p (stmt
);
2642 CALL_EXPR_BY_DESCRIPTOR (exp
) = gimple_call_by_descriptor_p (stmt
);
2643 SET_EXPR_LOCATION (exp
, gimple_location (stmt
));
2644 CALL_WITH_BOUNDS_P (exp
) = gimple_call_with_bounds_p (stmt
);
2646 /* Ensure RTL is created for debug args. */
2647 if (decl
&& DECL_HAS_DEBUG_ARGS_P (decl
))
2649 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (decl
);
2654 for (ix
= 1; (*debug_args
)->iterate (ix
, &dtemp
); ix
+= 2)
2656 gcc_assert (TREE_CODE (dtemp
) == DEBUG_EXPR_DECL
);
2657 expand_debug_expr (dtemp
);
2661 rtx_insn
*before_call
= get_last_insn ();
2662 lhs
= gimple_call_lhs (stmt
);
2664 expand_assignment (lhs
, exp
, false);
2666 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2668 /* If the gimple call is an indirect call and has 'nocf_check'
2669 attribute find a generated CALL insn to mark it as no
2670 control-flow verification is needed. */
2671 if (gimple_call_nocf_check_p (stmt
)
2672 && !gimple_call_fndecl (stmt
))
2674 rtx_insn
*last
= get_last_insn ();
2675 while (!CALL_P (last
)
2676 && last
!= before_call
)
2677 last
= PREV_INSN (last
);
2679 if (last
!= before_call
)
2680 add_reg_note (last
, REG_CALL_NOCF_CHECK
, const0_rtx
);
2683 mark_transaction_restart_calls (stmt
);
2687 /* Generate RTL for an asm statement (explicit assembler code).
2688 STRING is a STRING_CST node containing the assembler code text,
2689 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2690 insn is volatile; don't optimize it. */
2693 expand_asm_loc (tree string
, int vol
, location_t locus
)
2697 body
= gen_rtx_ASM_INPUT_loc (VOIDmode
,
2698 ggc_strdup (TREE_STRING_POINTER (string
)),
2701 MEM_VOLATILE_P (body
) = vol
;
2703 /* Non-empty basic ASM implicitly clobbers memory. */
2704 if (TREE_STRING_LENGTH (string
) != 0)
2707 unsigned i
, nclobbers
;
2708 auto_vec
<rtx
> input_rvec
, output_rvec
;
2709 auto_vec
<const char *> constraints
;
2710 auto_vec
<rtx
> clobber_rvec
;
2711 HARD_REG_SET clobbered_regs
;
2712 CLEAR_HARD_REG_SET (clobbered_regs
);
2714 clob
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
2715 clobber_rvec
.safe_push (clob
);
2717 if (targetm
.md_asm_adjust
)
2718 targetm
.md_asm_adjust (output_rvec
, input_rvec
,
2719 constraints
, clobber_rvec
,
2723 nclobbers
= clobber_rvec
.length ();
2724 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (1 + nclobbers
));
2726 XVECEXP (body
, 0, 0) = asm_op
;
2727 for (i
= 0; i
< nclobbers
; i
++)
2728 XVECEXP (body
, 0, i
+ 1) = gen_rtx_CLOBBER (VOIDmode
, clobber_rvec
[i
]);
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (const vec<const char *> &constraints)
{
  unsigned len = constraints.length();
  if (len > 0)
    {
      int nalternatives = n_occurrences (',', constraints[0]);

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in %<asm%>");
	  return false;
	}

      for (unsigned i = 1; i < len; ++i)
	if (n_occurrences (',', constraints[i]) != nalternatives)
	  {
	    error ("operand constraints for %<asm%> differ "
		   "in number of alternatives");
	    return false;
	  }
    }
  return true;
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit error and return the register
   variable definition for error, NULL_TREE for ok.  */

static tree
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qE conflicts with asm clobber list",
	     DECL_NAME (overlap));

      /* Reset registerness to stop multiple errors emitted for a single
	 variable.  */
      DECL_REGISTER (overlap) = 0;
      return overlap;
    }

  return NULL_TREE;
}
2797 /* Generate RTL for an asm statement with arguments.
2798 STRING is the instruction template.
2799 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2800 Each output or input has an expression in the TREE_VALUE and
2801 a tree list in TREE_PURPOSE which in turn contains a constraint
2802 name in TREE_VALUE (or NULL_TREE) and a constraint string
2804 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2805 that is clobbered by this insn.
2807 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2808 should be the fallthru basic block of the asm goto.
2810 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2811 Some elements of OUTPUTS may be replaced with trees representing temporary
2812 values. The caller should copy those temporary values to the originally
2815 VOL nonzero means the insn is volatile; don't optimize it. */
2818 expand_asm_stmt (gasm
*stmt
)
2820 class save_input_location
2825 explicit save_input_location(location_t where
)
2827 old
= input_location
;
2828 input_location
= where
;
2831 ~save_input_location()
2833 input_location
= old
;
2837 location_t locus
= gimple_location (stmt
);
2839 if (gimple_asm_input_p (stmt
))
2841 const char *s
= gimple_asm_string (stmt
);
2842 tree string
= build_string (strlen (s
), s
);
2843 expand_asm_loc (string
, gimple_asm_volatile_p (stmt
), locus
);
2847 /* There are some legacy diagnostics in here, and also avoids a
2848 sixth parameger to targetm.md_asm_adjust. */
2849 save_input_location
s_i_l(locus
);
2851 unsigned noutputs
= gimple_asm_noutputs (stmt
);
2852 unsigned ninputs
= gimple_asm_ninputs (stmt
);
2853 unsigned nlabels
= gimple_asm_nlabels (stmt
);
2856 /* ??? Diagnose during gimplification? */
2857 if (ninputs
+ noutputs
+ nlabels
> MAX_RECOG_OPERANDS
)
2859 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS
);
2863 auto_vec
<tree
, MAX_RECOG_OPERANDS
> output_tvec
;
2864 auto_vec
<tree
, MAX_RECOG_OPERANDS
> input_tvec
;
2865 auto_vec
<const char *, MAX_RECOG_OPERANDS
> constraints
;
2867 /* Copy the gimple vectors into new vectors that we can manipulate. */
2869 output_tvec
.safe_grow (noutputs
);
2870 input_tvec
.safe_grow (ninputs
);
2871 constraints
.safe_grow (noutputs
+ ninputs
);
2873 for (i
= 0; i
< noutputs
; ++i
)
2875 tree t
= gimple_asm_output_op (stmt
, i
);
2876 output_tvec
[i
] = TREE_VALUE (t
);
2877 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
2879 for (i
= 0; i
< ninputs
; i
++)
2881 tree t
= gimple_asm_input_op (stmt
, i
);
2882 input_tvec
[i
] = TREE_VALUE (t
);
2883 constraints
[i
+ noutputs
]
2884 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
2887 /* ??? Diagnose during gimplification? */
2888 if (! check_operand_nalternatives (constraints
))
2891 /* Count the number of meaningful clobbered registers, ignoring what
2892 we would ignore later. */
2893 auto_vec
<rtx
> clobber_rvec
;
2894 HARD_REG_SET clobbered_regs
;
2895 CLEAR_HARD_REG_SET (clobbered_regs
);
2897 if (unsigned n
= gimple_asm_nclobbers (stmt
))
2899 clobber_rvec
.reserve (n
);
2900 for (i
= 0; i
< n
; i
++)
2902 tree t
= gimple_asm_clobber_op (stmt
, i
);
2903 const char *regname
= TREE_STRING_POINTER (TREE_VALUE (t
));
2906 j
= decode_reg_name_and_count (regname
, &nregs
);
2911 /* ??? Diagnose during gimplification? */
2912 error ("unknown register name %qs in %<asm%>", regname
);
2916 rtx x
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
2917 clobber_rvec
.safe_push (x
);
2921 /* Otherwise we should have -1 == empty string
2922 or -3 == cc, which is not a register. */
2923 gcc_assert (j
== -1 || j
== -3);
2927 for (int reg
= j
; reg
< j
+ nregs
; reg
++)
2929 /* Clobbering the PIC register is an error. */
2930 if (reg
== (int) PIC_OFFSET_TABLE_REGNUM
)
2932 /* ??? Diagnose during gimplification? */
2933 error ("PIC register clobbered by %qs in %<asm%>",
2938 SET_HARD_REG_BIT (clobbered_regs
, reg
);
2939 rtx x
= gen_rtx_REG (reg_raw_mode
[reg
], reg
);
2940 clobber_rvec
.safe_push (x
);
2944 unsigned nclobbers
= clobber_rvec
.length();
2946 /* First pass over inputs and outputs checks validity and sets
2947 mark_addressable if needed. */
2948 /* ??? Diagnose during gimplification? */
2950 for (i
= 0; i
< noutputs
; ++i
)
2952 tree val
= output_tvec
[i
];
2953 tree type
= TREE_TYPE (val
);
2954 const char *constraint
;
2959 /* Try to parse the output constraint. If that fails, there's
2960 no point in going further. */
2961 constraint
= constraints
[i
];
2962 if (!parse_output_constraint (&constraint
, i
, ninputs
, noutputs
,
2963 &allows_mem
, &allows_reg
, &is_inout
))
2970 && REG_P (DECL_RTL (val
))
2971 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
))))
2972 mark_addressable (val
);
2975 for (i
= 0; i
< ninputs
; ++i
)
2977 bool allows_reg
, allows_mem
;
2978 const char *constraint
;
2980 constraint
= constraints
[i
+ noutputs
];
2981 if (! parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
2982 constraints
.address (),
2983 &allows_mem
, &allows_reg
))
2986 if (! allows_reg
&& allows_mem
)
2987 mark_addressable (input_tvec
[i
]);
2990 /* Second pass evaluates arguments. */
2992 /* Make sure stack is consistent for asm goto. */
2994 do_pending_stack_adjust ();
2995 int old_generating_concat_p
= generating_concat_p
;
2997 /* Vector of RTX's of evaluated output operands. */
2998 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> output_rvec
;
2999 auto_vec
<int, MAX_RECOG_OPERANDS
> inout_opnum
;
3000 rtx_insn
*after_rtl_seq
= NULL
, *after_rtl_end
= NULL
;
3002 output_rvec
.safe_grow (noutputs
);
3004 for (i
= 0; i
< noutputs
; ++i
)
3006 tree val
= output_tvec
[i
];
3007 tree type
= TREE_TYPE (val
);
3008 bool is_inout
, allows_reg
, allows_mem
, ok
;
3011 ok
= parse_output_constraint (&constraints
[i
], i
, ninputs
,
3012 noutputs
, &allows_mem
, &allows_reg
,
3016 /* If an output operand is not a decl or indirect ref and our constraint
3017 allows a register, make a temporary to act as an intermediate.
3018 Make the asm insn write into that, then we will copy it to
3019 the real output operand. Likewise for promoted variables. */
3021 generating_concat_p
= 0;
3023 if ((TREE_CODE (val
) == INDIRECT_REF
3026 && (allows_mem
|| REG_P (DECL_RTL (val
)))
3027 && ! (REG_P (DECL_RTL (val
))
3028 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
)))
3032 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
3033 !allows_reg
? EXPAND_MEMORY
: EXPAND_WRITE
);
3035 op
= validize_mem (op
);
3037 if (! allows_reg
&& !MEM_P (op
))
3038 error ("output number %d not directly addressable", i
);
3039 if ((! allows_mem
&& MEM_P (op
))
3040 || GET_CODE (op
) == CONCAT
)
3043 op
= gen_reg_rtx (GET_MODE (op
));
3045 generating_concat_p
= old_generating_concat_p
;
3048 emit_move_insn (op
, old_op
);
3050 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
3051 emit_move_insn (old_op
, op
);
3052 after_rtl_seq
= get_insns ();
3053 after_rtl_end
= get_last_insn ();
3059 op
= assign_temp (type
, 0, 1);
3060 op
= validize_mem (op
);
3061 if (!MEM_P (op
) && TREE_CODE (val
) == SSA_NAME
)
3062 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val
), op
);
3064 generating_concat_p
= old_generating_concat_p
;
3066 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
3067 expand_assignment (val
, make_tree (type
, op
), false);
3068 after_rtl_seq
= get_insns ();
3069 after_rtl_end
= get_last_insn ();
3072 output_rvec
[i
] = op
;
3075 inout_opnum
.safe_push (i
);
3078 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> input_rvec
;
3079 auto_vec
<machine_mode
, MAX_RECOG_OPERANDS
> input_mode
;
3081 input_rvec
.safe_grow (ninputs
);
3082 input_mode
.safe_grow (ninputs
);
3084 generating_concat_p
= 0;
3086 for (i
= 0; i
< ninputs
; ++i
)
3088 tree val
= input_tvec
[i
];
3089 tree type
= TREE_TYPE (val
);
3090 bool allows_reg
, allows_mem
, ok
;
3091 const char *constraint
;
3094 constraint
= constraints
[i
+ noutputs
];
3095 ok
= parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
3096 constraints
.address (),
3097 &allows_mem
, &allows_reg
);
3100 /* EXPAND_INITIALIZER will not generate code for valid initializer
3101 constants, but will still generate code for other types of operand.
3102 This is the behavior we want for constant constraints. */
3103 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
3104 allows_reg
? EXPAND_NORMAL
3105 : allows_mem
? EXPAND_MEMORY
3106 : EXPAND_INITIALIZER
);
3108 /* Never pass a CONCAT to an ASM. */
3109 if (GET_CODE (op
) == CONCAT
)
3110 op
= force_reg (GET_MODE (op
), op
);
3111 else if (MEM_P (op
))
3112 op
= validize_mem (op
);
3114 if (asm_operand_ok (op
, constraint
, NULL
) <= 0)
3116 if (allows_reg
&& TYPE_MODE (type
) != BLKmode
)
3117 op
= force_reg (TYPE_MODE (type
), op
);
3118 else if (!allows_mem
)
3119 warning (0, "asm operand %d probably doesn%'t match constraints",
3121 else if (MEM_P (op
))
3123 /* We won't recognize either volatile memory or memory
3124 with a queued address as available a memory_operand
3125 at this point. Ignore it: clearly this *is* a memory. */
3131 input_mode
[i
] = TYPE_MODE (type
);
3134 /* For in-out operands, copy output rtx to input rtx. */
3135 unsigned ninout
= inout_opnum
.length();
3136 for (i
= 0; i
< ninout
; i
++)
3138 int j
= inout_opnum
[i
];
3139 rtx o
= output_rvec
[j
];
3141 input_rvec
.safe_push (o
);
3142 input_mode
.safe_push (GET_MODE (o
));
3145 sprintf (buffer
, "%d", j
);
3146 constraints
.safe_push (ggc_strdup (buffer
));
3150 /* Sometimes we wish to automatically clobber registers across an asm.
3151 Case in point is when the i386 backend moved from cc0 to a hard reg --
3152 maintaining source-level compatibility means automatically clobbering
3153 the flags register. */
3154 rtx_insn
*after_md_seq
= NULL
;
3155 if (targetm
.md_asm_adjust
)
3156 after_md_seq
= targetm
.md_asm_adjust (output_rvec
, input_rvec
,
3157 constraints
, clobber_rvec
,
3160 /* Do not allow the hook to change the output and input count,
3161 lest it mess up the operand numbering. */
3162 gcc_assert (output_rvec
.length() == noutputs
);
3163 gcc_assert (input_rvec
.length() == ninputs
);
3164 gcc_assert (constraints
.length() == noutputs
+ ninputs
);
3166 /* But it certainly can adjust the clobbers. */
3167 nclobbers
= clobber_rvec
.length();
3169 /* Third pass checks for easy conflicts. */
3170 /* ??? Why are we doing this on trees instead of rtx. */
3172 bool clobber_conflict_found
= 0;
3173 for (i
= 0; i
< noutputs
; ++i
)
3174 if (tree_conflicts_with_clobbers_p (output_tvec
[i
], &clobbered_regs
))
3175 clobber_conflict_found
= 1;
3176 for (i
= 0; i
< ninputs
- ninout
; ++i
)
3177 if (tree_conflicts_with_clobbers_p (input_tvec
[i
], &clobbered_regs
))
3178 clobber_conflict_found
= 1;
3180 /* Make vectors for the expression-rtx, constraint strings,
3181 and named operands. */
3183 rtvec argvec
= rtvec_alloc (ninputs
);
3184 rtvec constraintvec
= rtvec_alloc (ninputs
);
3185 rtvec labelvec
= rtvec_alloc (nlabels
);
3187 rtx body
= gen_rtx_ASM_OPERANDS ((noutputs
== 0 ? VOIDmode
3188 : GET_MODE (output_rvec
[0])),
3189 ggc_strdup (gimple_asm_string (stmt
)),
3190 "", 0, argvec
, constraintvec
,
3192 MEM_VOLATILE_P (body
) = gimple_asm_volatile_p (stmt
);
3194 for (i
= 0; i
< ninputs
; ++i
)
3196 ASM_OPERANDS_INPUT (body
, i
) = input_rvec
[i
];
3197 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, i
)
3198 = gen_rtx_ASM_INPUT_loc (input_mode
[i
],
3199 constraints
[i
+ noutputs
],
3203 /* Copy labels to the vector. */
3204 rtx_code_label
*fallthru_label
= NULL
;
3207 basic_block fallthru_bb
= NULL
;
3208 edge fallthru
= find_fallthru_edge (gimple_bb (stmt
)->succs
);
3210 fallthru_bb
= fallthru
->dest
;
3212 for (i
= 0; i
< nlabels
; ++i
)
3214 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
3216 /* If asm goto has any labels in the fallthru basic block, use
3217 a label that we emit immediately after the asm goto. Expansion
3218 may insert further instructions into the same basic block after
3219 asm goto and if we don't do this, insertion of instructions on
3220 the fallthru edge might misbehave. See PR58670. */
3221 if (fallthru_bb
&& label_to_block_fn (cfun
, label
) == fallthru_bb
)
3223 if (fallthru_label
== NULL_RTX
)
3224 fallthru_label
= gen_label_rtx ();
3228 r
= label_rtx (label
);
3229 ASM_OPERANDS_LABEL (body
, i
) = gen_rtx_LABEL_REF (Pmode
, r
);
3233 /* Now, for each output, construct an rtx
3234 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3235 ARGVEC CONSTRAINTS OPNAMES))
3236 If there is more than one, put them inside a PARALLEL. */
3238 if (nlabels
> 0 && nclobbers
== 0)
3240 gcc_assert (noutputs
== 0);
3241 emit_jump_insn (body
);
3243 else if (noutputs
== 0 && nclobbers
== 0)
3245 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3248 else if (noutputs
== 1 && nclobbers
== 0)
3250 ASM_OPERANDS_OUTPUT_CONSTRAINT (body
) = constraints
[0];
3251 emit_insn (gen_rtx_SET (output_rvec
[0], body
));
3261 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num
+ nclobbers
));
3263 /* For each output operand, store a SET. */
3264 for (i
= 0; i
< noutputs
; ++i
)
3266 rtx src
, o
= output_rvec
[i
];
3269 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody
) = constraints
[0];
3274 src
= gen_rtx_ASM_OPERANDS (GET_MODE (o
),
3275 ASM_OPERANDS_TEMPLATE (obody
),
3276 constraints
[i
], i
, argvec
,
3277 constraintvec
, labelvec
, locus
);
3278 MEM_VOLATILE_P (src
) = gimple_asm_volatile_p (stmt
);
3280 XVECEXP (body
, 0, i
) = gen_rtx_SET (o
, src
);
3283 /* If there are no outputs (but there are some clobbers)
3284 store the bare ASM_OPERANDS into the PARALLEL. */
3286 XVECEXP (body
, 0, i
++) = obody
;
3288 /* Store (clobber REG) for each clobbered register specified. */
3289 for (unsigned j
= 0; j
< nclobbers
; ++j
)
3291 rtx clobbered_reg
= clobber_rvec
[j
];
3293 /* Do sanity check for overlap between clobbers and respectively
3294 input and outputs that hasn't been handled. Such overlap
3295 should have been detected and reported above. */
3296 if (!clobber_conflict_found
&& REG_P (clobbered_reg
))
3298 /* We test the old body (obody) contents to avoid
3299 tripping over the under-construction body. */
3300 for (unsigned k
= 0; k
< noutputs
; ++k
)
3301 if (reg_overlap_mentioned_p (clobbered_reg
, output_rvec
[k
]))
3302 internal_error ("asm clobber conflict with output operand");
3304 for (unsigned k
= 0; k
< ninputs
- ninout
; ++k
)
3305 if (reg_overlap_mentioned_p (clobbered_reg
, input_rvec
[k
]))
3306 internal_error ("asm clobber conflict with input operand");
3309 XVECEXP (body
, 0, i
++) = gen_rtx_CLOBBER (VOIDmode
, clobbered_reg
);
3313 emit_jump_insn (body
);
3318 generating_concat_p
= old_generating_concat_p
;
3321 emit_label (fallthru_label
);
3324 emit_insn (after_md_seq
);
3326 emit_insn (after_rtl_seq
);
3329 crtl
->has_asm_statement
= 1;
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

static void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

static void
expand_goto (tree label)
{
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);

  emit_jump (jump_target_rtx (label));
}

/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}

/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location unless it's already there.  */

  tree decl = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl);
  if (return_reg != val)
    {
      tree funtype = TREE_TYPE (current_function_decl);
      tree type = TREE_TYPE (decl);
      int unsignedp = TYPE_UNSIGNED (type);
      machine_mode old_mode = DECL_MODE (decl);
      machine_mode mode;
      if (DECL_BY_REFERENCE (decl))
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
      else
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);

      if (mode != old_mode)
	val = convert_modes (mode, old_mode, val, unsignedp);

      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
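/* As a concrete illustration of the promotion above: on a target whose ABI
   promotes sub-word return values, a function returning a 'short' has a
   DECL_MODE of HImode, but promote_function_mode typically widens that to
   the word mode (say SImode), so convert_modes sign- or zero-extends VAL
   before it is copied into the return register.  */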
3419 /* Generate RTL to evaluate the expression RETVAL and return it
3420 from the current function. */
3423 expand_return (tree retval
, tree bounds
)
3430 /* If function wants no value, give it none. */
3431 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl
))) == VOID_TYPE
)
3433 expand_normal (retval
);
3434 expand_null_return ();
3438 if (retval
== error_mark_node
)
3440 /* Treat this like a return of no value from a function that
3442 expand_null_return ();
3445 else if ((TREE_CODE (retval
) == MODIFY_EXPR
3446 || TREE_CODE (retval
) == INIT_EXPR
)
3447 && TREE_CODE (TREE_OPERAND (retval
, 0)) == RESULT_DECL
)
3448 retval_rhs
= TREE_OPERAND (retval
, 1);
3450 retval_rhs
= retval
;
3452 result_rtl
= DECL_RTL (DECL_RESULT (current_function_decl
));
3454 /* Put returned bounds to the right place. */
3455 bounds_rtl
= DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl
));
3461 if (bounds
&& bounds
!= error_mark_node
)
3463 bnd
= expand_normal (bounds
);
3464 targetm
.calls
.store_returned_bounds (bounds_rtl
, bnd
);
3466 else if (REG_P (bounds_rtl
))
3469 bnd
= chkp_expand_zero_bounds ();
3472 addr
= expand_normal (build_fold_addr_expr (retval_rhs
));
3473 addr
= gen_rtx_MEM (Pmode
, addr
);
3474 bnd
= targetm
.calls
.load_bounds_for_arg (addr
, NULL
, NULL
);
3477 targetm
.calls
.store_returned_bounds (bounds_rtl
, bnd
);
3483 gcc_assert (GET_CODE (bounds_rtl
) == PARALLEL
);
3486 bnd
= chkp_expand_zero_bounds ();
3489 addr
= expand_normal (build_fold_addr_expr (retval_rhs
));
3490 addr
= gen_rtx_MEM (Pmode
, addr
);
3493 for (n
= 0; n
< XVECLEN (bounds_rtl
, 0); n
++)
3495 rtx slot
= XEXP (XVECEXP (bounds_rtl
, 0, n
), 0);
3498 rtx offs
= XEXP (XVECEXP (bounds_rtl
, 0, n
), 1);
3499 rtx from
= adjust_address (addr
, Pmode
, INTVAL (offs
));
3500 bnd
= targetm
.calls
.load_bounds_for_arg (from
, NULL
, NULL
);
3502 targetm
.calls
.store_returned_bounds (slot
, bnd
);
3506 else if (chkp_function_instrumented_p (current_function_decl
)
3507 && !BOUNDED_P (retval_rhs
)
3508 && chkp_type_has_pointer (TREE_TYPE (retval_rhs
))
3509 && TREE_CODE (retval_rhs
) != RESULT_DECL
)
3511 rtx addr
= expand_normal (build_fold_addr_expr (retval_rhs
));
3512 addr
= gen_rtx_MEM (Pmode
, addr
);
3514 gcc_assert (MEM_P (result_rtl
));
3516 chkp_copy_bounds_for_stack_parm (result_rtl
, addr
, TREE_TYPE (retval_rhs
));
3519 /* If we are returning the RESULT_DECL, then the value has already
3520 been stored into it, so we don't have to do anything special. */
3521 if (TREE_CODE (retval_rhs
) == RESULT_DECL
)
3522 expand_value_return (result_rtl
);
3524 /* If the result is an aggregate that is being returned in one (or more)
3525 registers, load the registers here. */
3527 else if (retval_rhs
!= 0
3528 && TYPE_MODE (TREE_TYPE (retval_rhs
)) == BLKmode
3529 && REG_P (result_rtl
))
3531 val
= copy_blkmode_to_reg (GET_MODE (result_rtl
), retval_rhs
);
3534 /* Use the mode of the result value on the return register. */
3535 PUT_MODE (result_rtl
, GET_MODE (val
));
3536 expand_value_return (val
);
3539 expand_null_return ();
3541 else if (retval_rhs
!= 0
3542 && !VOID_TYPE_P (TREE_TYPE (retval_rhs
))
3543 && (REG_P (result_rtl
)
3544 || (GET_CODE (result_rtl
) == PARALLEL
)))
3546 /* Compute the return value into a temporary (usually a pseudo reg). */
3548 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl
)), 0, 1);
3549 val
= expand_expr (retval_rhs
, val
, GET_MODE (val
), EXPAND_NORMAL
);
3550 val
= force_not_mem (val
);
3551 expand_value_return (val
);
3555 /* No hard reg used; calculate value into hard return reg. */
3556 expand_expr (retval
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3557 expand_value_return (result_rtl
);
3561 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3562 STMT that doesn't require special handling for outgoing edges. That
3563 is no tailcalls and no GIMPLE_COND. */
3566 expand_gimple_stmt_1 (gimple
*stmt
)
3570 set_curr_insn_location (gimple_location (stmt
));
3572 switch (gimple_code (stmt
))
3575 op0
= gimple_goto_dest (stmt
);
3576 if (TREE_CODE (op0
) == LABEL_DECL
)
3579 expand_computed_goto (op0
);
3582 expand_label (gimple_label_label (as_a
<glabel
*> (stmt
)));
3585 case GIMPLE_PREDICT
:
3589 gswitch
*swtch
= as_a
<gswitch
*> (stmt
);
3590 if (gimple_switch_num_labels (swtch
) == 1)
3591 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch
)));
3593 expand_case (swtch
);
3597 expand_asm_stmt (as_a
<gasm
*> (stmt
));
3600 expand_call_stmt (as_a
<gcall
*> (stmt
));
3605 tree bnd
= gimple_return_retbnd (as_a
<greturn
*> (stmt
));
3606 op0
= gimple_return_retval (as_a
<greturn
*> (stmt
));
3608 if (op0
&& op0
!= error_mark_node
)
3610 tree result
= DECL_RESULT (current_function_decl
);
3612 /* Mark we have return statement with missing bounds. */
3614 && chkp_function_instrumented_p (cfun
->decl
)
3616 bnd
= error_mark_node
;
3618 /* If we are not returning the current function's RESULT_DECL,
3619 build an assignment to it. */
3622 /* I believe that a function's RESULT_DECL is unique. */
3623 gcc_assert (TREE_CODE (op0
) != RESULT_DECL
);
3625 /* ??? We'd like to use simply expand_assignment here,
3626 but this fails if the value is of BLKmode but the return
3627 decl is a register. expand_return has special handling
3628 for this combination, which eventually should move
3629 to common code. See comments there. Until then, let's
3630 build a modify expression :-/ */
3631 op0
= build2 (MODIFY_EXPR
, TREE_TYPE (result
),
3637 expand_null_return ();
3639 expand_return (op0
, bnd
);
3645 gassign
*assign_stmt
= as_a
<gassign
*> (stmt
);
3646 tree lhs
= gimple_assign_lhs (assign_stmt
);
3648 /* Tree expand used to fiddle with |= and &= of two bitfield
3649 COMPONENT_REFs here. This can't happen with gimple, the LHS
3650 of binary assigns must be a gimple reg. */
3652 if (TREE_CODE (lhs
) != SSA_NAME
3653 || get_gimple_rhs_class (gimple_expr_code (stmt
))
3654 == GIMPLE_SINGLE_RHS
)
3656 tree rhs
= gimple_assign_rhs1 (assign_stmt
);
3657 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt
))
3658 == GIMPLE_SINGLE_RHS
);
3659 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (rhs
)
3660 /* Do not put locations on possibly shared trees. */
3661 && !is_gimple_min_invariant (rhs
))
3662 SET_EXPR_LOCATION (rhs
, gimple_location (stmt
));
3663 if (TREE_CLOBBER_P (rhs
))
3664 /* This is a clobber to mark the going out of scope for
3668 expand_assignment (lhs
, rhs
,
3669 gimple_assign_nontemporal_move_p (
3675 bool nontemporal
= gimple_assign_nontemporal_move_p (assign_stmt
);
3676 struct separate_ops ops
;
3677 bool promoted
= false;
3679 target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3680 if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3683 ops
.code
= gimple_assign_rhs_code (assign_stmt
);
3684 ops
.type
= TREE_TYPE (lhs
);
3685 switch (get_gimple_rhs_class (ops
.code
))
3687 case GIMPLE_TERNARY_RHS
:
3688 ops
.op2
= gimple_assign_rhs3 (assign_stmt
);
3690 case GIMPLE_BINARY_RHS
:
3691 ops
.op1
= gimple_assign_rhs2 (assign_stmt
);
3693 case GIMPLE_UNARY_RHS
:
3694 ops
.op0
= gimple_assign_rhs1 (assign_stmt
);
3699 ops
.location
= gimple_location (stmt
);
3701 /* If we want to use a nontemporal store, force the value to
3702 register first. If we store into a promoted register,
3703 don't directly expand to target. */
3704 temp
= nontemporal
|| promoted
? NULL_RTX
: target
;
3705 temp
= expand_expr_real_2 (&ops
, temp
, GET_MODE (target
),
3712 int unsignedp
= SUBREG_PROMOTED_SIGN (target
);
3713 /* If TEMP is a VOIDmode constant, use convert_modes to make
3714 sure that we properly convert it. */
3715 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3717 temp
= convert_modes (GET_MODE (target
),
3718 TYPE_MODE (ops
.type
),
3720 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3721 GET_MODE (target
), temp
, unsignedp
);
3724 convert_move (SUBREG_REG (target
), temp
, unsignedp
);
3726 else if (nontemporal
&& emit_storent_insn (target
, temp
))
3730 temp
= force_operand (temp
, target
);
3732 emit_move_insn (target
, temp
);
3743 /* Expand one gimple statement STMT and return the last RTL instruction
3744 before any of the newly generated ones.
3746 In addition to generating the necessary RTL instructions this also
3747 sets REG_EH_REGION notes if necessary and sets the current source
3748 location for diagnostics. */
3751 expand_gimple_stmt (gimple
*stmt
)
3753 location_t saved_location
= input_location
;
3754 rtx_insn
*last
= get_last_insn ();
3759 /* We need to save and restore the current source location so that errors
3760 discovered during expansion are emitted with the right location. But
3761 it would be better if the diagnostic routines used the source location
3762 embedded in the tree nodes rather than globals. */
3763 if (gimple_has_location (stmt
))
3764 input_location
= gimple_location (stmt
);
3766 expand_gimple_stmt_1 (stmt
);
3768 /* Free any temporaries used to evaluate this statement. */
3771 input_location
= saved_location
;
3773 /* Mark all insns that may trap. */
3774 lp_nr
= lookup_stmt_eh_lp (stmt
);
3778 for (insn
= next_real_insn (last
); insn
;
3779 insn
= next_real_insn (insn
))
3781 if (! find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
)
3782 /* If we want exceptions for non-call insns, any
3783 may_trap_p instruction may throw. */
3784 && GET_CODE (PATTERN (insn
)) != CLOBBER
3785 && GET_CODE (PATTERN (insn
)) != USE
3786 && insn_could_throw_p (insn
))
3787 make_reg_eh_region_note (insn
, 0, lp_nr
);
3794 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3795 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3796 generated a tail call (something that might be denied by the ABI
3797 rules governing the call; see calls.c).
3799 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3800 can still reach the rest of BB. The case here is __builtin_sqrt,
3801 where the NaN result goes through the external function (with a
3802 tailcall) and the normal result happens via a sqrt instruction. */
3805 expand_gimple_tailcall (basic_block bb
, gcall
*stmt
, bool *can_fallthru
)
3807 rtx_insn
*last2
, *last
;
3810 profile_probability probability
;
3812 last2
= last
= expand_gimple_stmt (stmt
);
3814 for (last
= NEXT_INSN (last
); last
; last
= NEXT_INSN (last
))
3815 if (CALL_P (last
) && SIBLING_CALL_P (last
))
3818 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
3820 *can_fallthru
= true;
3824 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3825 Any instructions emitted here are about to be deleted. */
3826 do_pending_stack_adjust ();
3828 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3829 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3830 EH or abnormal edges, we shouldn't have created a tail call in
3831 the first place. So it seems to me we should just be removing
3832 all edges here, or redirecting the existing fallthru edge to
3835 probability
= profile_probability::never ();
3837 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3839 if (!(e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)))
3841 if (e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
3843 e
->dest
->frequency
-= EDGE_FREQUENCY (e
);
3844 if (e
->dest
->frequency
< 0)
3845 e
->dest
->frequency
= 0;
3847 probability
+= e
->probability
;
3854 /* This is somewhat ugly: the call_expr expander often emits instructions
3855 after the sibcall (to perform the function return). These confuse the
3856 find_many_sub_basic_blocks code, so we need to get rid of these. */
3857 last
= NEXT_INSN (last
);
3858 gcc_assert (BARRIER_P (last
));
3860 *can_fallthru
= false;
3861 while (NEXT_INSN (last
))
3863 /* For instance an sqrt builtin expander expands if with
3864 sibcall in the then and label for `else`. */
3865 if (LABEL_P (NEXT_INSN (last
)))
3867 *can_fallthru
= true;
3870 delete_insn (NEXT_INSN (last
));
3873 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_ABNORMAL
3875 e
->probability
= probability
;
3877 update_bb_for_insn (bb
);
3879 if (NEXT_INSN (last
))
3881 bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
3884 if (BARRIER_P (last
))
3885 BB_END (bb
) = PREV_INSN (last
);
3888 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
	? (op1 / mod > 0 ? 1 : -1)
	: 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
		       gen_rtx_MINUS (mode,
				      gen_rtx_ABS (mode, op1),
				      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
		       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
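/* Worked example for the signed adjustments above: dividing -7 by 2
   truncates to -3 with remainder -1.  The remainder is nonzero and
   op1 / mod (2 / -1) is negative, so floor_sdiv_adjust yields -1 and the
   floored quotient is -3 + -1 = -4.  Conversely, 7 divided by 2 truncates
   to 3 with remainder 1; ceil_sdiv_adjust yields 1, giving the ceiling 4.  */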
3972 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
3976 convert_debug_memory_address (scalar_int_mode mode
, rtx x
,
3979 #ifndef POINTERS_EXTEND_UNSIGNED
3980 gcc_assert (mode
== Pmode
3981 || mode
== targetm
.addr_space
.address_mode (as
));
3982 gcc_assert (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
);
3986 gcc_assert (targetm
.addr_space
.valid_pointer_mode (mode
, as
));
3988 if (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
)
3991 /* X must have some form of address mode already. */
3992 scalar_int_mode xmode
= as_a
<scalar_int_mode
> (GET_MODE (x
));
3993 if (GET_MODE_PRECISION (mode
) < GET_MODE_PRECISION (xmode
))
3994 x
= lowpart_subreg (mode
, x
, xmode
);
3995 else if (POINTERS_EXTEND_UNSIGNED
> 0)
3996 x
= gen_rtx_ZERO_EXTEND (mode
, x
);
3997 else if (!POINTERS_EXTEND_UNSIGNED
)
3998 x
= gen_rtx_SIGN_EXTEND (mode
, x
);
4001 switch (GET_CODE (x
))
4004 if ((SUBREG_PROMOTED_VAR_P (x
)
4005 || (REG_P (SUBREG_REG (x
)) && REG_POINTER (SUBREG_REG (x
)))
4006 || (GET_CODE (SUBREG_REG (x
)) == PLUS
4007 && REG_P (XEXP (SUBREG_REG (x
), 0))
4008 && REG_POINTER (XEXP (SUBREG_REG (x
), 0))
4009 && CONST_INT_P (XEXP (SUBREG_REG (x
), 1))))
4010 && GET_MODE (SUBREG_REG (x
)) == mode
)
4011 return SUBREG_REG (x
);
4014 temp
= gen_rtx_LABEL_REF (mode
, label_ref_label (x
));
4015 LABEL_REF_NONLOCAL_P (temp
) = LABEL_REF_NONLOCAL_P (x
);
4018 temp
= shallow_copy_rtx (x
);
4019 PUT_MODE (temp
, mode
);
4022 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
4024 temp
= gen_rtx_CONST (mode
, temp
);
4028 if (CONST_INT_P (XEXP (x
, 1)))
4030 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
4032 return gen_rtx_fmt_ee (GET_CODE (x
), mode
, temp
, XEXP (x
, 1));
4038 /* Don't know how to express ptr_extend as operation in debug info. */
4041 #endif /* POINTERS_EXTEND_UNSIGNED */
/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
   by avoid_deep_ter_for_debug.  */

static hash_map<tree, tree> *deep_ter_debug_map;

/* Split too deep TER chains for debug stmts using debug temporaries.  */

static void
avoid_deep_ter_for_debug (gimple *stmt, int depth)
{
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
	continue;
      gimple *g = get_gimple_for_ssa_name (use);
      if (g == NULL)
	continue;
      if (depth > 6 && !stmt_ends_bb_p (g))
	{
	  if (deep_ter_debug_map == NULL)
	    deep_ter_debug_map = new hash_map<tree, tree>;

	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
	  if (vexpr != NULL)
	    continue;
	  vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (use);
	  SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
	  avoid_deep_ter_for_debug (def_temp, 0);
	}
      else
	avoid_deep_ter_for_debug (g, depth + 1);
    }
}
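/* To illustrate the effect: when a use refers to an SSA name whose chain of
   single-use (TER-forwarded) definitions is more than six levels deep, for
   example _1 feeding _2 feeding _3 and so on, the chain is cut by binding the
   deep SSA name to a fresh DEBUG_EXPR_DECL right after its definition.  Later
   debug expansion can then refer to that debug temporary instead of
   re-expanding the whole chain at every use.  */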
4088 /* Return an RTX equivalent to the value of the parameter DECL. */
4091 expand_debug_parm_decl (tree decl
)
4093 rtx incoming
= DECL_INCOMING_RTL (decl
);
4096 && GET_MODE (incoming
) != BLKmode
4097 && ((REG_P (incoming
) && HARD_REGISTER_P (incoming
))
4098 || (MEM_P (incoming
)
4099 && REG_P (XEXP (incoming
, 0))
4100 && HARD_REGISTER_P (XEXP (incoming
, 0)))))
4102 rtx rtl
= gen_rtx_ENTRY_VALUE (GET_MODE (incoming
));
4104 #ifdef HAVE_window_save
4105 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4106 If the target machine has an explicit window save instruction, the
4107 actual entry value is the corresponding OUTGOING_REGNO instead. */
4108 if (REG_P (incoming
)
4109 && OUTGOING_REGNO (REGNO (incoming
)) != REGNO (incoming
))
4111 = gen_rtx_REG_offset (incoming
, GET_MODE (incoming
),
4112 OUTGOING_REGNO (REGNO (incoming
)), 0);
4113 else if (MEM_P (incoming
))
4115 rtx reg
= XEXP (incoming
, 0);
4116 if (OUTGOING_REGNO (REGNO (reg
)) != REGNO (reg
))
4118 reg
= gen_raw_REG (GET_MODE (reg
), OUTGOING_REGNO (REGNO (reg
)));
4119 incoming
= replace_equiv_address_nv (incoming
, reg
);
4122 incoming
= copy_rtx (incoming
);
4126 ENTRY_VALUE_EXP (rtl
) = incoming
;
4131 && GET_MODE (incoming
) != BLKmode
4132 && !TREE_ADDRESSABLE (decl
)
4134 && (XEXP (incoming
, 0) == virtual_incoming_args_rtx
4135 || (GET_CODE (XEXP (incoming
, 0)) == PLUS
4136 && XEXP (XEXP (incoming
, 0), 0) == virtual_incoming_args_rtx
4137 && CONST_INT_P (XEXP (XEXP (incoming
, 0), 1)))))
4138 return copy_rtx (incoming
);
4143 /* Return an RTX equivalent to the value of the tree expression EXP. */
4146 expand_debug_expr (tree exp
)
4148 rtx op0
= NULL_RTX
, op1
= NULL_RTX
, op2
= NULL_RTX
;
4149 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4150 machine_mode inner_mode
= VOIDmode
;
4151 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
4153 scalar_int_mode op0_mode
, op1_mode
, addr_mode
;
4155 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
4157 case tcc_expression
:
4158 switch (TREE_CODE (exp
))
4163 case WIDEN_MULT_PLUS_EXPR
:
4164 case WIDEN_MULT_MINUS_EXPR
:
4168 case TRUTH_ANDIF_EXPR
:
4169 case TRUTH_ORIF_EXPR
:
4170 case TRUTH_AND_EXPR
:
4172 case TRUTH_XOR_EXPR
:
4175 case TRUTH_NOT_EXPR
:
4184 op2
= expand_debug_expr (TREE_OPERAND (exp
, 2));
4191 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
4194 switch (TREE_CODE (exp
))
4200 case WIDEN_LSHIFT_EXPR
:
4201 /* Ensure second operand isn't wider than the first one. */
4202 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
4203 if (is_a
<scalar_int_mode
> (inner_mode
, &op1_mode
)
4204 && (GET_MODE_UNIT_PRECISION (mode
)
4205 < GET_MODE_PRECISION (op1_mode
)))
4206 op1
= lowpart_subreg (GET_MODE_INNER (mode
), op1
, op1_mode
);
4215 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4216 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4221 case tcc_comparison
:
4222 unsignedp
= TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4230 case tcc_exceptional
:
4231 case tcc_declaration
:
4237 switch (TREE_CODE (exp
))
4240 if (!lookup_constant_def (exp
))
4242 if (strlen (TREE_STRING_POINTER (exp
)) + 1
4243 != (size_t) TREE_STRING_LENGTH (exp
))
4245 op0
= gen_rtx_CONST_STRING (Pmode
, TREE_STRING_POINTER (exp
));
4246 op0
= gen_rtx_MEM (BLKmode
, op0
);
4247 set_mem_attributes (op0
, exp
, 0);
4255 op0
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_INITIALIZER
);
4259 gcc_assert (COMPLEX_MODE_P (mode
));
4260 op0
= expand_debug_expr (TREE_REALPART (exp
));
4261 op1
= expand_debug_expr (TREE_IMAGPART (exp
));
4262 return gen_rtx_CONCAT (mode
, op0
, op1
);
4264 case DEBUG_EXPR_DECL
:
4265 op0
= DECL_RTL_IF_SET (exp
);
4270 op0
= gen_rtx_DEBUG_EXPR (mode
);
4271 DEBUG_EXPR_TREE_DECL (op0
) = exp
;
4272 SET_DECL_RTL (exp
, op0
);
4282 op0
= DECL_RTL_IF_SET (exp
);
4284 /* This decl was probably optimized away. */
4288 || DECL_EXTERNAL (exp
)
4289 || !TREE_STATIC (exp
)
4291 || DECL_HARD_REGISTER (exp
)
4292 || DECL_IN_CONSTANT_POOL (exp
)
4293 || mode
== VOIDmode
)
4296 op0
= make_decl_rtl_for_debug (exp
);
4298 || GET_CODE (XEXP (op0
, 0)) != SYMBOL_REF
4299 || SYMBOL_REF_DECL (XEXP (op0
, 0)) != exp
)
4303 op0
= copy_rtx (op0
);
4305 if (GET_MODE (op0
) == BLKmode
4306 /* If op0 is not BLKmode, but mode is, adjust_mode
4307 below would ICE. While it is likely a FE bug,
4308 try to be robust here. See PR43166. */
4310 || (mode
== VOIDmode
&& GET_MODE (op0
) != VOIDmode
))
4312 gcc_assert (MEM_P (op0
));
4313 op0
= adjust_address_nv (op0
, mode
, 0);
4323 inner_mode
= GET_MODE (op0
);
4325 if (mode
== inner_mode
)
4328 if (inner_mode
== VOIDmode
)
4330 if (TREE_CODE (exp
) == SSA_NAME
)
4331 inner_mode
= TYPE_MODE (TREE_TYPE (exp
));
4333 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4334 if (mode
== inner_mode
)
4338 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
4340 if (GET_MODE_UNIT_BITSIZE (mode
)
4341 == GET_MODE_UNIT_BITSIZE (inner_mode
))
4342 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
4343 else if (GET_MODE_UNIT_BITSIZE (mode
)
4344 < GET_MODE_UNIT_BITSIZE (inner_mode
))
4345 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
4347 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
4349 else if (FLOAT_MODE_P (mode
))
4351 gcc_assert (TREE_CODE (exp
) != SSA_NAME
);
4352 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
4353 op0
= simplify_gen_unary (UNSIGNED_FLOAT
, mode
, op0
, inner_mode
);
4355 op0
= simplify_gen_unary (FLOAT
, mode
, op0
, inner_mode
);
4357 else if (FLOAT_MODE_P (inner_mode
))
4360 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
4362 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
4364 else if (GET_MODE_UNIT_PRECISION (mode
)
4365 == GET_MODE_UNIT_PRECISION (inner_mode
))
4366 op0
= lowpart_subreg (mode
, op0
, inner_mode
);
4367 else if (GET_MODE_UNIT_PRECISION (mode
)
4368 < GET_MODE_UNIT_PRECISION (inner_mode
))
4369 op0
= simplify_gen_unary (TRUNCATE
, mode
, op0
, inner_mode
);
4370 else if (UNARY_CLASS_P (exp
)
4371 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4373 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
4375 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
4381 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
4383 tree newexp
= fold_binary (MEM_REF
, TREE_TYPE (exp
),
4384 TREE_OPERAND (exp
, 0),
4385 TREE_OPERAND (exp
, 1));
4387 return expand_debug_expr (newexp
);
4391 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4392 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4396 if (TREE_CODE (exp
) == MEM_REF
)
4398 if (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
4399 || (GET_CODE (op0
) == PLUS
4400 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
))
4401 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4402 Instead just use get_inner_reference. */
4405 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
4406 if (!op1
|| !CONST_INT_P (op1
))
4409 op0
= plus_constant (inner_mode
, op0
, INTVAL (op1
));
4412 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4414 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
4416 if (op0
== NULL_RTX
)
4419 op0
= gen_rtx_MEM (mode
, op0
);
4420 set_mem_attributes (op0
, exp
, 0);
4421 if (TREE_CODE (exp
) == MEM_REF
4422 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
4423 set_mem_expr (op0
, NULL_TREE
);
4424 set_mem_addr_space (op0
, as
);
4428 case TARGET_MEM_REF
:
4429 if (TREE_CODE (TMR_BASE (exp
)) == ADDR_EXPR
4430 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp
), 0)))
4433 op0
= expand_debug_expr
4434 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp
)), exp
));
4438 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4439 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
4441 if (op0
== NULL_RTX
)
4444 op0
= gen_rtx_MEM (mode
, op0
);
4446 set_mem_attributes (op0
, exp
, 0);
4447 set_mem_addr_space (op0
, as
);
4453 case ARRAY_RANGE_REF
:
4458 case VIEW_CONVERT_EXPR
:
4461 HOST_WIDE_INT bitsize
, bitpos
;
4463 int reversep
, volatilep
= 0;
4465 = get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode1
,
4466 &unsignedp
, &reversep
, &volatilep
);
4472 orig_op0
= op0
= expand_debug_expr (tem
);
4479 machine_mode addrmode
, offmode
;
4484 op0
= XEXP (op0
, 0);
4485 addrmode
= GET_MODE (op0
);
4486 if (addrmode
== VOIDmode
)
4489 op1
= expand_debug_expr (offset
);
4493 offmode
= GET_MODE (op1
);
4494 if (offmode
== VOIDmode
)
4495 offmode
= TYPE_MODE (TREE_TYPE (offset
));
4497 if (addrmode
!= offmode
)
4498 op1
= lowpart_subreg (addrmode
, op1
, offmode
);
4500 /* Don't use offset_address here, we don't need a
4501 recognizable address, and we don't want to generate
4503 op0
= gen_rtx_MEM (mode
, simplify_gen_binary (PLUS
, addrmode
,
4509 if (mode1
== VOIDmode
)
4511 mode1
= smallest_int_mode_for_size (bitsize
);
4512 if (bitpos
>= BITS_PER_UNIT
)
4514 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
4515 bitpos
%= BITS_PER_UNIT
;
4517 else if (bitpos
< 0)
4520 = (-bitpos
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
;
4521 op0
= adjust_address_nv (op0
, mode1
, -units
);
4522 bitpos
+= units
* BITS_PER_UNIT
;
4524 else if (bitpos
== 0 && bitsize
== GET_MODE_BITSIZE (mode
))
4525 op0
= adjust_address_nv (op0
, mode
, 0);
4526 else if (GET_MODE (op0
) != mode1
)
4527 op0
= adjust_address_nv (op0
, mode1
, 0);
4529 op0
= copy_rtx (op0
);
4530 if (op0
== orig_op0
)
4531 op0
= shallow_copy_rtx (op0
);
4532 set_mem_attributes (op0
, exp
, 0);
4535 if (bitpos
== 0 && mode
== GET_MODE (op0
))
4541 if (GET_MODE (op0
) == BLKmode
)
4544 if ((bitpos
% BITS_PER_UNIT
) == 0
4545 && bitsize
== GET_MODE_BITSIZE (mode1
))
4547 machine_mode opmode
= GET_MODE (op0
);
4549 if (opmode
== VOIDmode
)
4550 opmode
= TYPE_MODE (TREE_TYPE (tem
));
4552 /* This condition may hold if we're expanding the address
4553 right past the end of an array that turned out not to
4554 be addressable (i.e., the address was only computed in
4555 debug stmts). The gen_subreg below would rightfully
4556 crash, and the address doesn't really exist, so just
4558 if (bitpos
>= GET_MODE_BITSIZE (opmode
))
4561 if ((bitpos
% GET_MODE_BITSIZE (mode
)) == 0)
4562 return simplify_gen_subreg (mode
, op0
, opmode
,
4563 bitpos
/ BITS_PER_UNIT
);
4566 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0
))
4567 && TYPE_UNSIGNED (TREE_TYPE (exp
))
4569 : ZERO_EXTRACT
, mode
,
4570 GET_MODE (op0
) != VOIDmode
4572 : TYPE_MODE (TREE_TYPE (tem
)),
4573 op0
, GEN_INT (bitsize
), GEN_INT (bitpos
));
4577 return simplify_gen_unary (ABS
, mode
, op0
, mode
);
4580 return simplify_gen_unary (NEG
, mode
, op0
, mode
);
4583 return simplify_gen_unary (NOT
, mode
, op0
, mode
);
4586 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
4588 ? UNSIGNED_FLOAT
: FLOAT
, mode
, op0
,
4591 case FIX_TRUNC_EXPR
:
4592 return simplify_gen_unary (unsignedp
? UNSIGNED_FIX
: FIX
, mode
, op0
,
4595 case POINTER_PLUS_EXPR
:
4596 /* For the rare target where pointers are not the same size as
4597 size_t, we need to check for mis-matched modes and correct
4600 && is_a
<scalar_int_mode
> (GET_MODE (op0
), &op0_mode
)
4601 && is_a
<scalar_int_mode
> (GET_MODE (op1
), &op1_mode
)
4602 && op0_mode
!= op1_mode
)
4604 if (GET_MODE_BITSIZE (op0_mode
) < GET_MODE_BITSIZE (op1_mode
)
4605 /* If OP0 is a partial mode, then we must truncate, even
4606 if it has the same bitsize as OP1 as GCC's
4607 representation of partial modes is opaque. */
4608 || (GET_MODE_CLASS (op0_mode
) == MODE_PARTIAL_INT
4609 && (GET_MODE_BITSIZE (op0_mode
)
4610 == GET_MODE_BITSIZE (op1_mode
))))
4611 op1
= simplify_gen_unary (TRUNCATE
, op0_mode
, op1
, op1_mode
);
4613 /* We always sign-extend, regardless of the signedness of
4614 the operand, because the operand is always unsigned
4615 here even if the original C expression is signed. */
4616 op1
= simplify_gen_unary (SIGN_EXTEND
, op0_mode
, op1
, op1_mode
);
4620 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
4623 return simplify_gen_binary (MINUS
, mode
, op0
, op1
);
4626 return simplify_gen_binary (MULT
, mode
, op0
, op1
);
4629 case TRUNC_DIV_EXPR
:
4630 case EXACT_DIV_EXPR
:
4632 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4634 return simplify_gen_binary (DIV
, mode
, op0
, op1
);
4636 case TRUNC_MOD_EXPR
:
4637 return simplify_gen_binary (unsignedp
? UMOD
: MOD
, mode
, op0
, op1
);
4639 case FLOOR_DIV_EXPR
:
4641 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4644 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4645 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4646 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
4647 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4650 case FLOOR_MOD_EXPR
:
4652 return simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4655 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4656 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
4657 adj
= simplify_gen_unary (NEG
, mode
,
4658 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4660 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4666 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4667 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4668 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
4669 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4673 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4674 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4675 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
4676 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4682 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4683 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
4684 adj
= simplify_gen_unary (NEG
, mode
,
4685 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4687 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4691 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4692 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
4693 adj
= simplify_gen_unary (NEG
, mode
,
4694 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4696 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4699 case ROUND_DIV_EXPR
:
4702 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4703 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4704 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
4705 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4709 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4710 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4711 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
4712 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4715 case ROUND_MOD_EXPR
:
4718 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4719 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
4720 adj
= simplify_gen_unary (NEG
, mode
,
4721 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4723 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4727 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4728 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
4729 adj
= simplify_gen_unary (NEG
, mode
,
4730 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4732 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4736 return simplify_gen_binary (ASHIFT
, mode
, op0
, op1
);
4740 return simplify_gen_binary (LSHIFTRT
, mode
, op0
, op1
);
4742 return simplify_gen_binary (ASHIFTRT
, mode
, op0
, op1
);
4745 return simplify_gen_binary (ROTATE
, mode
, op0
, op1
);
4748 return simplify_gen_binary (ROTATERT
, mode
, op0
, op1
);
4751 return simplify_gen_binary (unsignedp
? UMIN
: SMIN
, mode
, op0
, op1
);
4754 return simplify_gen_binary (unsignedp
? UMAX
: SMAX
, mode
, op0
, op1
);
4757 case TRUTH_AND_EXPR
:
4758 return simplify_gen_binary (AND
, mode
, op0
, op1
);
4762 return simplify_gen_binary (IOR
, mode
, op0
, op1
);
4765 case TRUTH_XOR_EXPR
:
4766 return simplify_gen_binary (XOR
, mode
, op0
, op1
);
4768 case TRUTH_ANDIF_EXPR
:
4769 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, const0_rtx
);
4771 case TRUTH_ORIF_EXPR
:
4772 return gen_rtx_IF_THEN_ELSE (mode
, op0
, const_true_rtx
, op1
);
4774 case TRUTH_NOT_EXPR
:
4775 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, const0_rtx
);
4778 return simplify_gen_relational (unsignedp
? LTU
: LT
, mode
, inner_mode
,
4782 return simplify_gen_relational (unsignedp
? LEU
: LE
, mode
, inner_mode
,
4786 return simplify_gen_relational (unsignedp
? GTU
: GT
, mode
, inner_mode
,
4790 return simplify_gen_relational (unsignedp
? GEU
: GE
, mode
, inner_mode
,
4794 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, op1
);
4797 return simplify_gen_relational (NE
, mode
, inner_mode
, op0
, op1
);
4799 case UNORDERED_EXPR
:
4800 return simplify_gen_relational (UNORDERED
, mode
, inner_mode
, op0
, op1
);
4803 return simplify_gen_relational (ORDERED
, mode
, inner_mode
, op0
, op1
);
4806 return simplify_gen_relational (UNLT
, mode
, inner_mode
, op0
, op1
);
4809 return simplify_gen_relational (UNLE
, mode
, inner_mode
, op0
, op1
);
4812 return simplify_gen_relational (UNGT
, mode
, inner_mode
, op0
, op1
);
4815 return simplify_gen_relational (UNGE
, mode
, inner_mode
, op0
, op1
);
4818 return simplify_gen_relational (UNEQ
, mode
, inner_mode
, op0
, op1
);
4821 return simplify_gen_relational (LTGT
, mode
, inner_mode
, op0
, op1
);
4824 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, op2
);
4827 gcc_assert (COMPLEX_MODE_P (mode
));
4828 if (GET_MODE (op0
) == VOIDmode
)
4829 op0
= gen_rtx_CONST (GET_MODE_INNER (mode
), op0
);
4830 if (GET_MODE (op1
) == VOIDmode
)
4831 op1
= gen_rtx_CONST (GET_MODE_INNER (mode
), op1
);
4832 return gen_rtx_CONCAT (mode
, op0
, op1
);
4835 if (GET_CODE (op0
) == CONCAT
)
4836 return gen_rtx_CONCAT (mode
, XEXP (op0
, 0),
4837 simplify_gen_unary (NEG
, GET_MODE_INNER (mode
),
4839 GET_MODE_INNER (mode
)));
4842 scalar_mode imode
= GET_MODE_INNER (mode
);
4847 re
= adjust_address_nv (op0
, imode
, 0);
4848 im
= adjust_address_nv (op0
, imode
, GET_MODE_SIZE (imode
));
4852 scalar_int_mode ifmode
;
4853 scalar_int_mode ihmode
;
4855 if (!int_mode_for_mode (mode
).exists (&ifmode
)
4856 || !int_mode_for_mode (imode
).exists (&ihmode
))
4858 halfsize
= GEN_INT (GET_MODE_BITSIZE (ihmode
));
4861 re
= gen_rtx_SUBREG (ifmode
, re
, 0);
4862 re
= gen_rtx_ZERO_EXTRACT (ihmode
, re
, halfsize
, const0_rtx
);
4863 if (imode
!= ihmode
)
4864 re
= gen_rtx_SUBREG (imode
, re
, 0);
4865 im
= copy_rtx (op0
);
4867 im
= gen_rtx_SUBREG (ifmode
, im
, 0);
4868 im
= gen_rtx_ZERO_EXTRACT (ihmode
, im
, halfsize
, halfsize
);
4869 if (imode
!= ihmode
)
4870 im
= gen_rtx_SUBREG (imode
, im
, 0);
4872 im
= gen_rtx_NEG (imode
, im
);
4873 return gen_rtx_CONCAT (mode
, re
, im
);
4877 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4878 if (!op0
|| !MEM_P (op0
))
4880 if ((TREE_CODE (TREE_OPERAND (exp
, 0)) == VAR_DECL
4881 || TREE_CODE (TREE_OPERAND (exp
, 0)) == PARM_DECL
4882 || TREE_CODE (TREE_OPERAND (exp
, 0)) == RESULT_DECL
)
4883 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp
, 0))
4884 || target_for_debug_bind (TREE_OPERAND (exp
, 0))))
4885 return gen_rtx_DEBUG_IMPLICIT_PTR (mode
, TREE_OPERAND (exp
, 0));
4887 if (handled_component_p (TREE_OPERAND (exp
, 0)))
4889 HOST_WIDE_INT bitoffset
, bitsize
, maxsize
;
4892 = get_ref_base_and_extent (TREE_OPERAND (exp
, 0), &bitoffset
,
4893 &bitsize
, &maxsize
, &reverse
);
4895 || TREE_CODE (decl
) == PARM_DECL
4896 || TREE_CODE (decl
) == RESULT_DECL
)
4897 && (!TREE_ADDRESSABLE (decl
)
4898 || target_for_debug_bind (decl
))
4899 && (bitoffset
% BITS_PER_UNIT
) == 0
4901 && bitsize
== maxsize
)
4903 rtx base
= gen_rtx_DEBUG_IMPLICIT_PTR (mode
, decl
);
4904 return plus_constant (mode
, base
, bitoffset
/ BITS_PER_UNIT
);
4908 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == MEM_REF
4909 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
4912 op0
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
4915 && (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
4916 || (GET_CODE (op0
) == PLUS
4917 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
4918 && CONST_INT_P (XEXP (op0
, 1)))))
4920 op1
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
4922 if (!op1
|| !CONST_INT_P (op1
))
4925 return plus_constant (mode
, op0
, INTVAL (op1
));
4932 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
4933 addr_mode
= SCALAR_INT_TYPE_MODE (TREE_TYPE (exp
));
4934 op0
= convert_debug_memory_address (addr_mode
, XEXP (op0
, 0), as
);
4942 nelts
= VECTOR_CST_NELTS (exp
);
4943 op0
= gen_rtx_CONCATN (mode
, rtvec_alloc (nelts
));
4945 for (i
= 0; i
< nelts
; ++i
)
4947 op1
= expand_debug_expr (VECTOR_CST_ELT (exp
, i
));
4950 XVECEXP (op0
, 0, i
) = op1
;
4957 if (TREE_CLOBBER_P (exp
))
4959 else if (TREE_CODE (TREE_TYPE (exp
)) == VECTOR_TYPE
)
4964 op0
= gen_rtx_CONCATN
4965 (mode
, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
))));
4967 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), i
, val
)
4969 op1
= expand_debug_expr (val
);
4972 XVECEXP (op0
, 0, i
) = op1
;
4975 if (i
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)))
4977 op1
= expand_debug_expr
4978 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp
))));
4983 for (; i
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)); i
++)
4984 XVECEXP (op0
, 0, i
) = op1
;
4990 goto flag_unsupported
;
4993 /* ??? Maybe handle some builtins? */
4998 gimple
*g
= get_gimple_for_ssa_name (exp
);
5002 if (deep_ter_debug_map
)
5004 tree
*slot
= deep_ter_debug_map
->get (exp
);
5009 t
= gimple_assign_rhs_to_tree (g
);
5010 op0
= expand_debug_expr (t
);
5016 /* If this is a reference to an incoming value of
5017 parameter that is never used in the code or where the
5018 incoming value is never used in the code, use
5019 PARM_DECL's DECL_RTL if set. */
5020 if (SSA_NAME_IS_DEFAULT_DEF (exp
)
5021 && SSA_NAME_VAR (exp
)
5022 && TREE_CODE (SSA_NAME_VAR (exp
)) == PARM_DECL
5023 && has_zero_uses (exp
))
5025 op0
= expand_debug_parm_decl (SSA_NAME_VAR (exp
));
5028 op0
= expand_debug_expr (SSA_NAME_VAR (exp
));
5033 int part
= var_to_partition (SA
.map
, exp
);
5035 if (part
== NO_PARTITION
)
5038 gcc_assert (part
>= 0 && (unsigned)part
< SA
.map
->num_partitions
);
5040 op0
= copy_rtx (SA
.partition_to_pseudo
[part
]);
5048 /* Vector stuff. For most of the codes we don't have rtl codes. */
5049 case REALIGN_LOAD_EXPR
:
5050 case REDUC_MAX_EXPR
:
5051 case REDUC_MIN_EXPR
:
5052 case REDUC_PLUS_EXPR
:
5054 case VEC_PACK_FIX_TRUNC_EXPR
:
5055 case VEC_PACK_SAT_EXPR
:
5056 case VEC_PACK_TRUNC_EXPR
:
5057 case VEC_UNPACK_FLOAT_HI_EXPR
:
5058 case VEC_UNPACK_FLOAT_LO_EXPR
:
5059 case VEC_UNPACK_HI_EXPR
:
5060 case VEC_UNPACK_LO_EXPR
:
5061 case VEC_WIDEN_MULT_HI_EXPR
:
5062 case VEC_WIDEN_MULT_LO_EXPR
:
5063 case VEC_WIDEN_MULT_EVEN_EXPR
:
5064 case VEC_WIDEN_MULT_ODD_EXPR
:
5065 case VEC_WIDEN_LSHIFT_HI_EXPR
:
5066 case VEC_WIDEN_LSHIFT_LO_EXPR
:
5071 case ADDR_SPACE_CONVERT_EXPR
:
5072 case FIXED_CONVERT_EXPR
:
5074 case WITH_SIZE_EXPR
:
5075 case BIT_INSERT_EXPR
:
5079 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5080 && SCALAR_INT_MODE_P (mode
))
5083 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5085 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
5088 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5090 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op1
,
5092 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
5093 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
5097 case WIDEN_MULT_EXPR
:
5098 case WIDEN_MULT_PLUS_EXPR
:
5099 case WIDEN_MULT_MINUS_EXPR
:
5100 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5101 && SCALAR_INT_MODE_P (mode
))
5103 inner_mode
= GET_MODE (op0
);
5104 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5105 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
5107 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
5108 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1))))
5109 op1
= simplify_gen_unary (ZERO_EXTEND
, mode
, op1
, inner_mode
);
5111 op1
= simplify_gen_unary (SIGN_EXTEND
, mode
, op1
, inner_mode
);
5112 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
5113 if (TREE_CODE (exp
) == WIDEN_MULT_EXPR
)
5115 else if (TREE_CODE (exp
) == WIDEN_MULT_PLUS_EXPR
)
5116 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
5118 return simplify_gen_binary (MINUS
, mode
, op2
, op0
);
5122 case MULT_HIGHPART_EXPR
:
5123 /* ??? Similar to the above. */
5126 case WIDEN_SUM_EXPR
:
5127 case WIDEN_LSHIFT_EXPR
:
5128 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5129 && SCALAR_INT_MODE_P (mode
))
5132 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5134 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
5136 return simplify_gen_binary (TREE_CODE (exp
) == WIDEN_LSHIFT_EXPR
5137 ? ASHIFT
: PLUS
, mode
, op0
, op1
);
5142 return simplify_gen_ternary (FMA
, mode
, inner_mode
, op0
, op1
, op2
);
5155 /* Return an RTX equivalent to the source bind value of the tree expression
5159 expand_debug_source_expr (tree exp
)
5162 machine_mode mode
= VOIDmode
, inner_mode
;
5164 switch (TREE_CODE (exp
))
5168 mode
= DECL_MODE (exp
);
5169 op0
= expand_debug_parm_decl (exp
);
5172 /* See if this isn't an argument that has been completely
5174 if (!DECL_RTL_SET_P (exp
)
5175 && !DECL_INCOMING_RTL (exp
)
5176 && DECL_ABSTRACT_ORIGIN (current_function_decl
))
5178 tree aexp
= DECL_ORIGIN (exp
);
5179 if (DECL_CONTEXT (aexp
)
5180 == DECL_ABSTRACT_ORIGIN (current_function_decl
))
5182 vec
<tree
, va_gc
> **debug_args
;
5185 debug_args
= decl_debug_args_lookup (current_function_decl
);
5186 if (debug_args
!= NULL
)
5188 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
);
5191 return gen_rtx_DEBUG_PARAMETER_REF (mode
, aexp
);
5201 if (op0
== NULL_RTX
)
5204 inner_mode
= GET_MODE (op0
);
5205 if (mode
== inner_mode
)
5208 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
5210 if (GET_MODE_UNIT_BITSIZE (mode
)
5211 == GET_MODE_UNIT_BITSIZE (inner_mode
))
5212 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
5213 else if (GET_MODE_UNIT_BITSIZE (mode
)
5214 < GET_MODE_UNIT_BITSIZE (inner_mode
))
5215 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
5217 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
5219 else if (FLOAT_MODE_P (mode
))
5221 else if (FLOAT_MODE_P (inner_mode
))
5223 if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
5224 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
5226 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
5228 else if (GET_MODE_UNIT_PRECISION (mode
)
5229 == GET_MODE_UNIT_PRECISION (inner_mode
))
5230 op0
= lowpart_subreg (mode
, op0
, inner_mode
);
5231 else if (GET_MODE_UNIT_PRECISION (mode
)
5232 < GET_MODE_UNIT_PRECISION (inner_mode
))
5233 op0
= simplify_gen_unary (TRUNCATE
, mode
, op0
, inner_mode
);
5234 else if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
5235 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
5237 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
5242 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
5243 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5244 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
5247 avoid_complex_debug_insns (rtx_insn
*insn
, rtx
*exp_p
, int depth
)
5251 if (exp
== NULL_RTX
)
5254 if ((OBJECT_P (exp
) && !MEM_P (exp
)) || GET_CODE (exp
) == CLOBBER
)
5259 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5260 rtx dval
= make_debug_expr_from_rtl (exp
);
5262 /* Emit a debug bind insn before INSN. */
5263 rtx bind
= gen_rtx_VAR_LOCATION (GET_MODE (exp
),
5264 DEBUG_EXPR_TREE_DECL (dval
), exp
,
5265 VAR_INIT_STATUS_INITIALIZED
);
5267 emit_debug_insn_before (bind
, insn
);
5272 const char *format_ptr
= GET_RTX_FORMAT (GET_CODE (exp
));
5274 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (exp
)); i
++)
5275 switch (*format_ptr
++)
5278 avoid_complex_debug_insns (insn
, &XEXP (exp
, i
), depth
+ 1);
5283 for (j
= 0; j
< XVECLEN (exp
, i
); j
++)
5284 avoid_complex_debug_insns (insn
, &XVECEXP (exp
, i
, j
), depth
+ 1);
5292 /* Expand the _LOCs in debug insns. We run this after expanding all
5293 regular insns, so that any variables referenced in the function
5294 will have their DECL_RTLs set. */
5297 expand_debug_locations (void)
5300 rtx_insn
*last
= get_last_insn ();
5301 int save_strict_alias
= flag_strict_aliasing
;
5303 /* New alias sets while setting up memory attributes cause
5304 -fcompare-debug failures, even though it doesn't bring about any
5306 flag_strict_aliasing
= 0;
5308 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5309 if (DEBUG_INSN_P (insn
))
5311 tree value
= (tree
)INSN_VAR_LOCATION_LOC (insn
);
5313 rtx_insn
*prev_insn
, *insn2
;
5316 if (value
== NULL_TREE
)
5320 if (INSN_VAR_LOCATION_STATUS (insn
)
5321 == VAR_INIT_STATUS_UNINITIALIZED
)
5322 val
= expand_debug_source_expr (value
);
5323 /* The avoid_deep_ter_for_debug function inserts
5324 debug bind stmts after SSA_NAME definition, with the
5325 SSA_NAME as the whole bind location. Disable temporarily
5326 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5327 being defined in this DEBUG_INSN. */
5328 else if (deep_ter_debug_map
&& TREE_CODE (value
) == SSA_NAME
)
5330 tree
*slot
= deep_ter_debug_map
->get (value
);
5333 if (*slot
== INSN_VAR_LOCATION_DECL (insn
))
5338 val
= expand_debug_expr (value
);
5340 *slot
= INSN_VAR_LOCATION_DECL (insn
);
5343 val
= expand_debug_expr (value
);
5344 gcc_assert (last
== get_last_insn ());
5348 val
= gen_rtx_UNKNOWN_VAR_LOC ();
5351 mode
= GET_MODE (INSN_VAR_LOCATION (insn
));
5353 gcc_assert (mode
== GET_MODE (val
)
5354 || (GET_MODE (val
) == VOIDmode
5355 && (CONST_SCALAR_INT_P (val
)
5356 || GET_CODE (val
) == CONST_FIXED
5357 || GET_CODE (val
) == LABEL_REF
)));
5360 INSN_VAR_LOCATION_LOC (insn
) = val
;
5361 prev_insn
= PREV_INSN (insn
);
5362 for (insn2
= insn
; insn2
!= prev_insn
; insn2
= PREV_INSN (insn2
))
5363 avoid_complex_debug_insns (insn2
, &INSN_VAR_LOCATION_LOC (insn2
), 0);
5366 flag_strict_aliasing
= save_strict_alias
;
5369 /* Performs swapping operands of commutative operations to expand
5370 the expensive one first. */
5373 reorder_operands (basic_block bb
)
5375 unsigned int *lattice
; /* Hold cost of each statement. */
5376 unsigned int i
= 0, n
= 0;
5377 gimple_stmt_iterator gsi
;
5383 use_operand_p use_p
;
5384 gimple
*def0
, *def1
;
5386 /* Compute cost of each statement using estimate_num_insns. */
5387 stmts
= bb_seq (bb
);
5388 for (gsi
= gsi_start (stmts
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5390 stmt
= gsi_stmt (gsi
);
5391 if (!is_gimple_debug (stmt
))
5392 gimple_set_uid (stmt
, n
++);
5394 lattice
= XNEWVEC (unsigned int, n
);
5395 for (gsi
= gsi_start (stmts
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5398 stmt
= gsi_stmt (gsi
);
5399 if (is_gimple_debug (stmt
))
5401 cost
= estimate_num_insns (stmt
, &eni_size_weights
);
5403 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
5405 tree use
= USE_FROM_PTR (use_p
);
5407 if (TREE_CODE (use
) != SSA_NAME
)
5409 def_stmt
= get_gimple_for_ssa_name (use
);
5412 lattice
[i
] += lattice
[gimple_uid (def_stmt
)];
5415 if (!is_gimple_assign (stmt
)
5416 || !commutative_tree_code (gimple_assign_rhs_code (stmt
)))
5418 op0
= gimple_op (stmt
, 1);
5419 op1
= gimple_op (stmt
, 2);
5420 if (TREE_CODE (op0
) != SSA_NAME
5421 || TREE_CODE (op1
) != SSA_NAME
)
5423 /* Swap operands if the second one is more expensive. */
5424 def0
= get_gimple_for_ssa_name (op0
);
5425 def1
= get_gimple_for_ssa_name (op1
);
5429 if (!def0
|| lattice
[gimple_uid (def1
)] > lattice
[gimple_uid (def0
)])
5433 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5435 fprintf (dump_file
, "Swap operands in stmt:\n");
5436 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
5437 fprintf (dump_file
, "Cost left opnd=%d, right opnd=%d\n",
5438 def0
? lattice
[gimple_uid (def0
)] : 0,
5439 lattice
[gimple_uid (def1
)]);
5441 swap_ssa_operands (stmt
, gimple_assign_rhs1_ptr (stmt
),
5442 gimple_assign_rhs2_ptr (stmt
));
5448 /* Expand basic block BB from GIMPLE trees to RTL. */
5451 expand_gimple_basic_block (basic_block bb
, bool disable_tail_calls
)
5453 gimple_stmt_iterator gsi
;
5455 gimple
*stmt
= NULL
;
5462 fprintf (dump_file
, "\n;; Generating RTL for gimple basic block %d\n",
5465 /* Note that since we are now transitioning from GIMPLE to RTL, we
5466 cannot use the gsi_*_bb() routines because they expect the basic
5467 block to be in GIMPLE, instead of RTL. Therefore, we need to
5468 access the BB sequence directly. */
5470 reorder_operands (bb
);
5471 stmts
= bb_seq (bb
);
5472 bb
->il
.gimple
.seq
= NULL
;
5473 bb
->il
.gimple
.phi_nodes
= NULL
;
5474 rtl_profile_for_bb (bb
);
5475 init_rtl_bb_info (bb
);
5476 bb
->flags
|= BB_RTL
;
5478 /* Remove the RETURN_EXPR if we may fall though to the exit
5480 gsi
= gsi_last (stmts
);
5481 if (!gsi_end_p (gsi
)
5482 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
5484 greturn
*ret_stmt
= as_a
<greturn
*> (gsi_stmt (gsi
));
5486 gcc_assert (single_succ_p (bb
));
5487 gcc_assert (single_succ (bb
) == EXIT_BLOCK_PTR_FOR_FN (cfun
));
5489 if (bb
->next_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
5490 && !gimple_return_retval (ret_stmt
))
5492 gsi_remove (&gsi
, false);
5493 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
5497 gsi
= gsi_start (stmts
);
5498 if (!gsi_end_p (gsi
))
5500 stmt
= gsi_stmt (gsi
);
5501 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5505 rtx_code_label
**elt
= lab_rtx_for_bb
->get (bb
);
5509 last
= get_last_insn ();
5513 expand_gimple_stmt (stmt
);
5520 BB_HEAD (bb
) = NEXT_INSN (last
);
5521 if (NOTE_P (BB_HEAD (bb
)))
5522 BB_HEAD (bb
) = NEXT_INSN (BB_HEAD (bb
));
5523 note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, BB_HEAD (bb
));
5525 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5528 BB_HEAD (bb
) = note
= emit_note (NOTE_INSN_BASIC_BLOCK
);
5530 NOTE_BASIC_BLOCK (note
) = bb
;
5532 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5536 stmt
= gsi_stmt (gsi
);
5538 /* If this statement is a non-debug one, and we generate debug
5539 insns, then this one might be the last real use of a TERed
5540 SSA_NAME, but where there are still some debug uses further
5541 down. Expanding the current SSA name in such further debug
5542 uses by their RHS might lead to wrong debug info, as coalescing
5543 might make the operands of such RHS be placed into the same
5544 pseudo as something else. Like so:
5545 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5549 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5550 If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
5551 the write to a_2 would actually have clobbered the place which
5554 So, instead of that, we recognize the situation, and generate
5555 debug temporaries at the last real use of TERed SSA names:
5562 if (MAY_HAVE_DEBUG_INSNS
5564 && !is_gimple_debug (stmt
))
5570 location_t sloc
= curr_insn_location ();
5572 /* Look for SSA names that have their last use here (TERed
5573 names always have only one real use). */
5574 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
5575 if ((def
= get_gimple_for_ssa_name (op
)))
5577 imm_use_iterator imm_iter
;
5578 use_operand_p use_p
;
5579 bool have_debug_uses
= false;
5581 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, op
)
5583 if (gimple_debug_bind_p (USE_STMT (use_p
)))
5585 have_debug_uses
= true;
5590 if (have_debug_uses
)
5592 /* OP is a TERed SSA name, with DEF its defining
5593 statement, and where OP is used in further debug
5594 instructions. Generate a debug temporary, and
5595 replace all uses of OP in debug insns with that
5598 tree value
= gimple_assign_rhs_to_tree (def
);
5599 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
5603 set_curr_insn_location (gimple_location (def
));
5605 DECL_ARTIFICIAL (vexpr
) = 1;
5606 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
5608 mode
= DECL_MODE (value
);
5610 mode
= TYPE_MODE (TREE_TYPE (value
));
5611 SET_DECL_MODE (vexpr
, mode
);
5613 val
= gen_rtx_VAR_LOCATION
5614 (mode
, vexpr
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
5616 emit_debug_insn (val
);
5618 FOR_EACH_IMM_USE_STMT (debugstmt
, imm_iter
, op
)
5620 if (!gimple_debug_bind_p (debugstmt
))
5623 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
5624 SET_USE (use_p
, vexpr
);
5626 update_stmt (debugstmt
);
5630 set_curr_insn_location (sloc
);
5633 currently_expanding_gimple_stmt
= stmt
;
5635 /* Expand this statement, then evaluate the resulting RTL and
5636 fixup the CFG accordingly. */
5637 if (gimple_code (stmt
) == GIMPLE_COND
)
5639 new_bb
= expand_gimple_cond (bb
, as_a
<gcond
*> (stmt
));
5643 else if (gimple_debug_bind_p (stmt
))
5645 location_t sloc
= curr_insn_location ();
5646 gimple_stmt_iterator nsi
= gsi
;
5650 tree var
= gimple_debug_bind_get_var (stmt
);
5655 if (TREE_CODE (var
) != DEBUG_EXPR_DECL
5656 && TREE_CODE (var
) != LABEL_DECL
5657 && !target_for_debug_bind (var
))
5658 goto delink_debug_stmt
;
5660 if (gimple_debug_bind_has_value_p (stmt
))
5661 value
= gimple_debug_bind_get_value (stmt
);
5665 last
= get_last_insn ();
5667 set_curr_insn_location (gimple_location (stmt
));
5670 mode
= DECL_MODE (var
);
5672 mode
= TYPE_MODE (TREE_TYPE (var
));
5674 val
= gen_rtx_VAR_LOCATION
5675 (mode
, var
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
5677 emit_debug_insn (val
);
5679 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5681 /* We can't dump the insn with a TREE where an RTX
5683 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
5684 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5685 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
5689 /* In order not to generate too many debug temporaries,
5690 we delink all uses of debug statements we already expanded.
5691 Therefore debug statements between definition and real
5692 use of TERed SSA names will continue to use the SSA name,
5693 and not be replaced with debug temps. */
5694 delink_stmt_imm_use (stmt
);
5698 if (gsi_end_p (nsi
))
5700 stmt
= gsi_stmt (nsi
);
5701 if (!gimple_debug_bind_p (stmt
))
5705 set_curr_insn_location (sloc
);
5707 else if (gimple_debug_source_bind_p (stmt
))
5709 location_t sloc
= curr_insn_location ();
5710 tree var
= gimple_debug_source_bind_get_var (stmt
);
5711 tree value
= gimple_debug_source_bind_get_value (stmt
);
5715 last
= get_last_insn ();
5717 set_curr_insn_location (gimple_location (stmt
));
5719 mode
= DECL_MODE (var
);
5721 val
= gen_rtx_VAR_LOCATION (mode
, var
, (rtx
)value
,
5722 VAR_INIT_STATUS_UNINITIALIZED
);
5724 emit_debug_insn (val
);
5726 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5728 /* We can't dump the insn with a TREE where an RTX
5730 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
5731 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5732 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
5735 set_curr_insn_location (sloc
);
5739 gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
);
5741 && gimple_call_tail_p (call_stmt
)
5742 && disable_tail_calls
)
5743 gimple_call_set_tail (call_stmt
, false);
5745 if (call_stmt
&& gimple_call_tail_p (call_stmt
))
5748 new_bb
= expand_gimple_tailcall (bb
, call_stmt
, &can_fallthru
);
5759 def_operand_p def_p
;
5760 def_p
= SINGLE_SSA_DEF_OPERAND (stmt
, SSA_OP_DEF
);
5764 /* Ignore this stmt if it is in the list of
5765 replaceable expressions. */
5767 && bitmap_bit_p (SA
.values
,
5768 SSA_NAME_VERSION (DEF_FROM_PTR (def_p
))))
5771 last
= expand_gimple_stmt (stmt
);
5772 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5777 currently_expanding_gimple_stmt
= NULL
;
5779 /* Expand implicit goto and convert goto_locus. */
5780 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5782 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
5783 set_curr_insn_location (e
->goto_locus
);
5784 if ((e
->flags
& EDGE_FALLTHRU
) && e
->dest
!= bb
->next_bb
)
5786 emit_jump (label_rtx_for_bb (e
->dest
));
5787 e
->flags
&= ~EDGE_FALLTHRU
;
5791 /* Expanded RTL can create a jump in the last instruction of block.
5792 This later might be assumed to be a jump to successor and break edge insertion.
5793 We need to insert dummy move to prevent this. PR41440. */
5794 if (single_succ_p (bb
)
5795 && (single_succ_edge (bb
)->flags
& EDGE_FALLTHRU
)
5796 && (last
= get_last_insn ())
5798 || (DEBUG_INSN_P (last
)
5799 && JUMP_P (prev_nondebug_insn (last
)))))
5801 rtx dummy
= gen_reg_rtx (SImode
);
5802 emit_insn_after_noloc (gen_move_insn (dummy
, dummy
), last
, NULL
);
5805 do_pending_stack_adjust ();
5807 /* Find the block tail. The last insn in the block is the insn
5808 before a barrier and/or table jump insn. */
5809 last
= get_last_insn ();
5810 if (BARRIER_P (last
))
5811 last
= PREV_INSN (last
);
5812 if (JUMP_TABLE_DATA_P (last
))
5813 last
= PREV_INSN (PREV_INSN (last
));
5816 update_bb_for_insn (bb
);
5822 /* Create a basic block for initialization code. */
5825 construct_init_block (void)
5827 basic_block init_block
, first_block
;
5831 /* Multiple entry points not supported yet. */
5832 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
) == 1);
5833 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5834 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun
));
5835 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->flags
|= BB_RTL
;
5836 EXIT_BLOCK_PTR_FOR_FN (cfun
)->flags
|= BB_RTL
;
5838 e
= EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun
), 0);
5840 /* When entry edge points to first basic block, we don't need jump,
5841 otherwise we have to jump into proper target. */
5842 if (e
&& e
->dest
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
)
5844 tree label
= gimple_block_label (e
->dest
);
5846 emit_jump (jump_target_rtx (label
));
5850 flags
= EDGE_FALLTHRU
;
5852 init_block
= create_basic_block (NEXT_INSN (get_insns ()),
5854 ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5855 init_block
->frequency
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->frequency
;
5856 init_block
->count
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
5857 add_bb_to_loop (init_block
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->loop_father
);
5860 first_block
= e
->dest
;
5861 redirect_edge_succ (e
, init_block
);
5862 e
= make_single_succ_edge (init_block
, first_block
, flags
);
5865 e
= make_single_succ_edge (init_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
),
5868 update_bb_for_insn (init_block
);
5872 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5873 found in the block tree. */
5876 set_block_levels (tree block
, int level
)
5880 BLOCK_NUMBER (block
) = level
;
5881 set_block_levels (BLOCK_SUBBLOCKS (block
), level
+ 1);
5882 block
= BLOCK_CHAIN (block
);
5886 /* Create a block containing landing pads and similar stuff. */
5889 construct_exit_block (void)
5891 rtx_insn
*head
= get_last_insn ();
5893 basic_block exit_block
;
5897 basic_block prev_bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
5898 rtx_insn
*orig_end
= BB_END (prev_bb
);
5900 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
));
5902 /* Make sure the locus is set to the end of the function, so that
5903 epilogue line numbers and warnings are set properly. */
5904 if (LOCATION_LOCUS (cfun
->function_end_locus
) != UNKNOWN_LOCATION
)
5905 input_location
= cfun
->function_end_locus
;
5907 /* Generate rtl for function exit. */
5908 expand_function_end ();
5910 end
= get_last_insn ();
5913 /* While emitting the function end we could move end of the last basic
5915 BB_END (prev_bb
) = orig_end
;
5916 while (NEXT_INSN (head
) && NOTE_P (NEXT_INSN (head
)))
5917 head
= NEXT_INSN (head
);
5918 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5919 bb frequency counting will be confused. Any instructions before that
5920 label are emitted for the case where PREV_BB falls through into the
5921 exit block, so append those instructions to prev_bb in that case. */
5922 if (NEXT_INSN (head
) != return_label
)
5924 while (NEXT_INSN (head
) != return_label
)
5926 if (!NOTE_P (NEXT_INSN (head
)))
5927 BB_END (prev_bb
) = NEXT_INSN (head
);
5928 head
= NEXT_INSN (head
);
5931 exit_block
= create_basic_block (NEXT_INSN (head
), end
, prev_bb
);
5932 exit_block
->frequency
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->frequency
;
5933 exit_block
->count
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
;
5934 add_bb_to_loop (exit_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->loop_father
);
5937 while (ix
< EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
))
5939 e
= EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun
), ix
);
5940 if (!(e
->flags
& EDGE_ABNORMAL
))
5941 redirect_edge_succ (e
, exit_block
);
5946 e
= make_single_succ_edge (exit_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
),
5948 FOR_EACH_EDGE (e2
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5951 exit_block
->count
-= e2
->count ();
5952 exit_block
->frequency
-= EDGE_FREQUENCY (e2
);
5954 if (exit_block
->frequency
< 0)
5955 exit_block
->frequency
= 0;
5956 update_bb_for_insn (exit_block
);
5959 /* Helper function for discover_nonconstant_array_refs.
5960 Look for ARRAY_REF nodes with non-constant indexes and mark them
5964 discover_nonconstant_array_refs_r (tree
* tp
, int *walk_subtrees
,
5965 void *data ATTRIBUTE_UNUSED
)
5969 if (IS_TYPE_OR_DECL_P (t
))
5971 else if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
5973 while (((TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
5974 && is_gimple_min_invariant (TREE_OPERAND (t
, 1))
5975 && (!TREE_OPERAND (t
, 2)
5976 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
5977 || (TREE_CODE (t
) == COMPONENT_REF
5978 && (!TREE_OPERAND (t
,2)
5979 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
5980 || TREE_CODE (t
) == BIT_FIELD_REF
5981 || TREE_CODE (t
) == REALPART_EXPR
5982 || TREE_CODE (t
) == IMAGPART_EXPR
5983 || TREE_CODE (t
) == VIEW_CONVERT_EXPR
5984 || CONVERT_EXPR_P (t
))
5985 t
= TREE_OPERAND (t
, 0);
5987 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
5989 t
= get_base_address (t
);
5991 && DECL_MODE (t
) != BLKmode
)
5992 TREE_ADDRESSABLE (t
) = 1;
6001 /* RTL expansion is not able to compile array references with variable
6002 offsets for arrays stored in single register. Discover such
6003 expressions and mark variables as addressable to avoid this
6007 discover_nonconstant_array_refs (void)
6010 gimple_stmt_iterator gsi
;
6012 FOR_EACH_BB_FN (bb
, cfun
)
6013 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6015 gimple
*stmt
= gsi_stmt (gsi
);
6016 if (!is_gimple_debug (stmt
))
6017 walk_gimple_op (stmt
, discover_nonconstant_array_refs_r
, NULL
);
6021 /* This function sets crtl->args.internal_arg_pointer to a virtual
6022 register if DRAP is needed. Local register allocator will replace
6023 virtual_incoming_args_rtx with the virtual register. */
6026 expand_stack_alignment (void)
6029 unsigned int preferred_stack_boundary
;
6031 if (! SUPPORTS_STACK_ALIGNMENT
)
6034 if (cfun
->calls_alloca
6035 || cfun
->has_nonlocal_label
6036 || crtl
->has_nonlocal_goto
)
6037 crtl
->need_drap
= true;
6039 /* Call update_stack_boundary here again to update incoming stack
6040 boundary. It may set incoming stack alignment to a different
6041 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6042 use the minimum incoming stack alignment to check if it is OK
6043 to perform sibcall optimization since sibcall optimization will
6044 only align the outgoing stack to incoming stack boundary. */
6045 if (targetm
.calls
.update_stack_boundary
)
6046 targetm
.calls
.update_stack_boundary ();
6048 /* The incoming stack frame has to be aligned at least at
6049 parm_stack_boundary. */
6050 gcc_assert (crtl
->parm_stack_boundary
<= INCOMING_STACK_BOUNDARY
);
6052 /* Update crtl->stack_alignment_estimated and use it later to align
6053 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6054 exceptions since callgraph doesn't collect incoming stack alignment
6056 if (cfun
->can_throw_non_call_exceptions
6057 && PREFERRED_STACK_BOUNDARY
> crtl
->preferred_stack_boundary
)
6058 preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
6060 preferred_stack_boundary
= crtl
->preferred_stack_boundary
;
6061 if (preferred_stack_boundary
> crtl
->stack_alignment_estimated
)
6062 crtl
->stack_alignment_estimated
= preferred_stack_boundary
;
6063 if (preferred_stack_boundary
> crtl
->stack_alignment_needed
)
6064 crtl
->stack_alignment_needed
= preferred_stack_boundary
;
6066 gcc_assert (crtl
->stack_alignment_needed
6067 <= crtl
->stack_alignment_estimated
);
6069 crtl
->stack_realign_needed
6070 = INCOMING_STACK_BOUNDARY
< crtl
->stack_alignment_estimated
;
6071 crtl
->stack_realign_tried
= crtl
->stack_realign_needed
;
6073 crtl
->stack_realign_processed
= true;
6075 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6077 gcc_assert (targetm
.calls
.get_drap_rtx
!= NULL
);
6078 drap_rtx
= targetm
.calls
.get_drap_rtx ();
6080 /* stack_realign_drap and drap_rtx must match. */
6081 gcc_assert ((stack_realign_drap
!= 0) == (drap_rtx
!= NULL
));
6083 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6084 if (NULL
!= drap_rtx
)
6086 crtl
->args
.internal_arg_pointer
= drap_rtx
;
6088 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6090 fixup_tail_calls ();
6096 expand_main_function (void)
6098 #if (defined(INVOKE__main) \
6099 || (!defined(HAS_INIT_SECTION) \
6100 && !defined(INIT_SECTION_ASM_OP) \
6101 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6102 emit_library_call (init_one_libfunc (NAME__MAIN
), LCT_NORMAL
, VOIDmode
);
6107 /* Expand code to initialize the stack_protect_guard. This is invoked at
6108 the beginning of a function to be protected. */
6111 stack_protect_prologue (void)
6113 tree guard_decl
= targetm
.stack_protect_guard ();
6116 x
= expand_normal (crtl
->stack_protect_guard
);
6118 y
= expand_normal (guard_decl
);
6122 /* Allow the target to copy from Y to X without leaking Y into a
6124 if (targetm
.have_stack_protect_set ())
6125 if (rtx_insn
*insn
= targetm
.gen_stack_protect_set (x
, y
))
6131 /* Otherwise do a straight move. */
6132 emit_move_insn (x
, y
);
6135 /* Translate the intermediate representation contained in the CFG
6136 from GIMPLE trees to RTL.
6138 We do conversion per basic block and preserve/update the tree CFG.
6139 This implies we have to do some magic as the CFG can simultaneously
6140 consist of basic blocks containing RTL and GIMPLE trees. This can
6141 confuse the CFG hooks, so be careful to not manipulate CFG during
6146 const pass_data pass_data_expand
=
6148 RTL_PASS
, /* type */
6149 "expand", /* name */
6150 OPTGROUP_NONE
, /* optinfo_flags */
6151 TV_EXPAND
, /* tv_id */
6152 ( PROP_ssa
| PROP_gimple_leh
| PROP_cfg
6155 | PROP_gimple_lva
), /* properties_required */
6156 PROP_rtl
, /* properties_provided */
6157 ( PROP_ssa
| PROP_trees
), /* properties_destroyed */
6158 0, /* todo_flags_start */
6159 0, /* todo_flags_finish */
6162 class pass_expand
: public rtl_opt_pass
6165 pass_expand (gcc::context
*ctxt
)
6166 : rtl_opt_pass (pass_data_expand
, ctxt
)
6169 /* opt_pass methods: */
6170 virtual unsigned int execute (function
*);
6172 }; // class pass_expand
6175 pass_expand::execute (function
*fun
)
6177 basic_block bb
, init_block
;
6180 rtx_insn
*var_seq
, *var_ret_seq
;
6183 timevar_push (TV_OUT_OF_SSA
);
6184 rewrite_out_of_ssa (&SA
);
6185 timevar_pop (TV_OUT_OF_SSA
);
6186 SA
.partition_to_pseudo
= XCNEWVEC (rtx
, SA
.map
->num_partitions
);
6188 if (MAY_HAVE_DEBUG_STMTS
&& flag_tree_ter
)
6190 gimple_stmt_iterator gsi
;
6191 FOR_EACH_BB_FN (bb
, cfun
)
6192 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6193 if (gimple_debug_bind_p (gsi_stmt (gsi
)))
6194 avoid_deep_ter_for_debug (gsi_stmt (gsi
), 0);
6197 /* Make sure all values used by the optimization passes have sane
6201 /* Some backends want to know that we are expanding to RTL. */
6202 currently_expanding_to_rtl
= 1;
6203 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6204 free_dominance_info (CDI_DOMINATORS
);
6206 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
6208 if (chkp_function_instrumented_p (current_function_decl
))
6209 chkp_reset_rtl_bounds ();
6211 insn_locations_init ();
6212 if (!DECL_IS_BUILTIN (current_function_decl
))
6214 /* Eventually, all FEs should explicitly set function_start_locus. */
6215 if (LOCATION_LOCUS (fun
->function_start_locus
) == UNKNOWN_LOCATION
)
6216 set_curr_insn_location
6217 (DECL_SOURCE_LOCATION (current_function_decl
));
6219 set_curr_insn_location (fun
->function_start_locus
);
6222 set_curr_insn_location (UNKNOWN_LOCATION
);
6223 prologue_location
= curr_insn_location ();
6225 #ifdef INSN_SCHEDULING
6226 init_sched_attrs ();
6229 /* Make sure first insn is a note even if we don't want linenums.
6230 This makes sure the first insn will never be deleted.
6231 Also, final expects a note to appear there. */
6232 emit_note (NOTE_INSN_DELETED
);
6234 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6235 discover_nonconstant_array_refs ();
6237 targetm
.expand_to_rtl_hook ();
6238 crtl
->init_stack_alignment ();
6239 fun
->cfg
->max_jumptable_ents
= 0;
6241 /* Resovle the function section. Some targets, like ARM EABI rely on knowledge
6242 of the function section at exapnsion time to predict distance of calls. */
6243 resolve_unique_section (current_function_decl
, 0, flag_function_sections
);
6245 /* Expand the variables recorded during gimple lowering. */
6246 timevar_push (TV_VAR_EXPAND
);
6249 var_ret_seq
= expand_used_vars ();
6251 var_seq
= get_insns ();
6253 timevar_pop (TV_VAR_EXPAND
);
6255 /* Honor stack protection warnings. */
6256 if (warn_stack_protect
)
6258 if (fun
->calls_alloca
)
6259 warning (OPT_Wstack_protector
,
6260 "stack protector not protecting local variables: "
6261 "variable length buffer");
6262 if (has_short_buffer
&& !crtl
->stack_protect_guard
)
6263 warning (OPT_Wstack_protector
,
6264 "stack protector not protecting function: "
6265 "all local arrays are less than %d bytes long",
6266 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE
));
6269 /* Set up parameters and prepare for return, for the function. */
6270 expand_function_start (current_function_decl
);
6272 /* If we emitted any instructions for setting up the variables,
6273 emit them before the FUNCTION_START note. */
6276 emit_insn_before (var_seq
, parm_birth_insn
);
6278 /* In expand_function_end we'll insert the alloca save/restore
6279 before parm_birth_insn. We've just insertted an alloca call.
6280 Adjust the pointer to match. */
6281 parm_birth_insn
= var_seq
;
6284 /* Now propagate the RTL assignment of each partition to the
6285 underlying var of each SSA_NAME. */
6288 FOR_EACH_SSA_NAME (i
, name
, cfun
)
6290 /* We might have generated new SSA names in
6291 update_alias_info_with_stack_vars. They will have a NULL
6292 defining statements, and won't be part of the partitioning,
6294 if (!SSA_NAME_DEF_STMT (name
))
6297 adjust_one_expanded_partition_var (name
);
6300 /* Clean up RTL of variables that straddle across multiple
6301 partitions, and check that the rtl of any PARM_DECLs that are not
6302 cleaned up is that of their default defs. */
6303 FOR_EACH_SSA_NAME (i
, name
, cfun
)
6307 /* We might have generated new SSA names in
6308 update_alias_info_with_stack_vars. They will have a NULL
6309 defining statements, and won't be part of the partitioning,
6311 if (!SSA_NAME_DEF_STMT (name
))
6313 part
= var_to_partition (SA
.map
, name
);
6314 if (part
== NO_PARTITION
)
6317 /* If this decl was marked as living in multiple places, reset
6318 this now to NULL. */
6319 tree var
= SSA_NAME_VAR (name
);
6320 if (var
&& DECL_RTL_IF_SET (var
) == pc_rtx
)
6321 SET_DECL_RTL (var
, NULL
);
6322 /* Check that the pseudos chosen by assign_parms are those of
6323 the corresponding default defs. */
6324 else if (SSA_NAME_IS_DEFAULT_DEF (name
)
6325 && (TREE_CODE (var
) == PARM_DECL
6326 || TREE_CODE (var
) == RESULT_DECL
))
6328 rtx in
= DECL_RTL_IF_SET (var
);
6330 rtx out
= SA
.partition_to_pseudo
[part
];
6331 gcc_assert (in
== out
);
6333 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6334 those expected by debug backends for each parm and for
6335 the result. This is particularly important for stabs,
6336 whose register elimination from parm's DECL_RTL may cause
6337 -fcompare-debug differences as SET_DECL_RTL changes reg's
6338 attrs. So, make sure the RTL already has the parm as the
6339 EXPR, so that it won't change. */
6340 SET_DECL_RTL (var
, NULL_RTX
);
6342 set_mem_attributes (in
, var
, true);
6343 SET_DECL_RTL (var
, in
);
6347 /* If this function is `main', emit a call to `__main'
6348 to run global initializers, etc. */
6349 if (DECL_NAME (current_function_decl
)
6350 && MAIN_NAME_P (DECL_NAME (current_function_decl
))
6351 && DECL_FILE_SCOPE_P (current_function_decl
))
6352 expand_main_function ();
6354 /* Initialize the stack_protect_guard field. This must happen after the
6355 call to __main (if any) so that the external decl is initialized. */
6356 if (crtl
->stack_protect_guard
&& targetm
.stack_protect_runtime_enabled_p ())
6357 stack_protect_prologue ();
6359 expand_phi_nodes (&SA
);
6361 /* Release any stale SSA redirection data. */
6362 redirect_edge_var_map_empty ();
6364 /* Register rtl specific functions for cfg. */
6365 rtl_register_cfg_hooks ();
6367 init_block
= construct_init_block ();
6369 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
6370 remaining edges later. */
6371 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (fun
)->succs
)
6372 e
->flags
&= ~EDGE_EXECUTABLE
;
6374 lab_rtx_for_bb
= new hash_map
<basic_block
, rtx_code_label
*>;
6375 FOR_BB_BETWEEN (bb
, init_block
->next_bb
, EXIT_BLOCK_PTR_FOR_FN (fun
),
6377 bb
= expand_gimple_basic_block (bb
, var_ret_seq
!= NULL_RTX
);
6379 if (MAY_HAVE_DEBUG_INSNS
)
6380 expand_debug_locations ();
6382 if (deep_ter_debug_map
)
6384 delete deep_ter_debug_map
;
6385 deep_ter_debug_map
= NULL
;
6388 /* Free stuff we no longer need after GIMPLE optimizations. */
6389 free_dominance_info (CDI_DOMINATORS
);
6390 free_dominance_info (CDI_POST_DOMINATORS
);
6391 delete_tree_cfg_annotations (fun
);
6393 timevar_push (TV_OUT_OF_SSA
);
6394 finish_out_of_ssa (&SA
);
6395 timevar_pop (TV_OUT_OF_SSA
);
6397 timevar_push (TV_POST_EXPAND
);
6398 /* We are no longer in SSA form. */
6399 fun
->gimple_df
->in_ssa_p
= false;
6400 loops_state_clear (LOOP_CLOSED_SSA
);
6402 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
6403 conservatively to true until they are all profile aware. */
6404 delete lab_rtx_for_bb
;
6405 free_histograms (fun
);
6407 construct_exit_block ();
6408 insn_locations_finalize ();
6412 rtx_insn
*after
= return_label
;
6413 rtx_insn
*next
= NEXT_INSN (after
);
6414 if (next
&& NOTE_INSN_BASIC_BLOCK_P (next
))
6416 emit_insn_after (var_ret_seq
, after
);
6419 /* Zap the tree EH table. */
6420 set_eh_throw_stmt_table (fun
, NULL
);
6422 /* We need JUMP_LABEL be set in order to redirect jumps, and hence
6423 split edges which edge insertions might do. */
6424 rebuild_jump_labels (get_insns ());
6426 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR_FOR_FN (fun
),
6427 EXIT_BLOCK_PTR_FOR_FN (fun
), next_bb
)
6431 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
6435 rebuild_jump_labels_chain (e
->insns
.r
);
6436 /* Put insns after parm birth, but before
6437 NOTE_INSNS_FUNCTION_BEG. */
6438 if (e
->src
== ENTRY_BLOCK_PTR_FOR_FN (fun
)
6439 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun
)))
6441 rtx_insn
*insns
= e
->insns
.r
;
6443 if (NOTE_P (parm_birth_insn
)
6444 && NOTE_KIND (parm_birth_insn
) == NOTE_INSN_FUNCTION_BEG
)
6445 emit_insn_before_noloc (insns
, parm_birth_insn
, e
->dest
);
6447 emit_insn_after_noloc (insns
, parm_birth_insn
, e
->dest
);
6450 commit_one_edge_insertion (e
);
6457 /* We're done expanding trees to RTL. */
6458 currently_expanding_to_rtl
= 0;
6460 flush_mark_addressable_queue ();
6462 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR_FOR_FN (fun
)->next_bb
,
6463 EXIT_BLOCK_PTR_FOR_FN (fun
), next_bb
)
6467 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
6469 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6470 e
->flags
&= ~EDGE_EXECUTABLE
;
6472 /* At the moment not all abnormal edges match the RTL
6473 representation. It is safe to remove them here as
6474 find_many_sub_basic_blocks will rediscover them.
6475 In the future we should get this fixed properly. */
6476 if ((e
->flags
& EDGE_ABNORMAL
)
6477 && !(e
->flags
& EDGE_SIBCALL
))
6484 auto_sbitmap
blocks (last_basic_block_for_fn (fun
));
6485 bitmap_ones (blocks
);
6486 find_many_sub_basic_blocks (blocks
);
6487 purge_all_dead_edges ();
6489 expand_stack_alignment ();
6491 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6493 if (crtl
->tail_call_emit
)
6494 fixup_tail_calls ();
6496 /* After initial rtl generation, call back to finish generating
6497 exception support code. We need to do this before cleaning up
6498 the CFG as the code does not expect dead landing pads. */
6499 if (fun
->eh
->region_tree
!= NULL
)
6500 finish_eh_generation ();
6502 /* BB subdivision may have created basic blocks that are are only reachable
6503 from unlikely bbs but not marked as such in the profile. */
6505 propagate_unlikely_bbs_forward ();
6507 /* Remove unreachable blocks, otherwise we cannot compute dominators
6508 which are needed for loop state verification. As a side-effect
6509 this also compacts blocks.
6510 ??? We cannot remove trivially dead insns here as for example
6511 the DRAP reg on i?86 is not magically live at this point.
6512 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6513 cleanup_cfg (CLEANUP_NO_INSN_DEL
);
6515 checking_verify_flow_info ();
6517 /* Initialize pseudos allocated for hard registers. */
6518 emit_initial_value_sets ();
6520 /* And finally unshare all RTL. */
6523 /* There's no need to defer outputting this function any more; we
6524 know we want to output it. */
6525 DECL_DEFER_OUTPUT (current_function_decl
) = 0;
6527 /* Now that we're done expanding trees to RTL, we shouldn't have any
6528 more CONCATs anywhere. */
6529 generating_concat_p
= 0;
6534 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6535 /* And the pass manager will dump RTL for us. */
6538 /* If we're emitting a nested function, make sure its parent gets
6539 emitted as well. Doing otherwise confuses debug info. */
6542 for (parent
= DECL_CONTEXT (current_function_decl
);
6543 parent
!= NULL_TREE
;
6544 parent
= get_containing_scope (parent
))
6545 if (TREE_CODE (parent
) == FUNCTION_DECL
)
6546 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent
)) = 1;
6549 TREE_ASM_WRITTEN (current_function_decl
) = 1;
6551 /* After expanding, the return labels are no longer needed. */
6552 return_label
= NULL
;
6553 naked_return_label
= NULL
;
6555 /* After expanding, the tm_restart map is no longer needed. */
6556 if (fun
->gimple_df
->tm_restart
)
6557 fun
->gimple_df
->tm_restart
= NULL
;
6559 /* Tag the blocks with a depth number so that change_scope can find
6560 the common parent easily. */
6561 set_block_levels (DECL_INITIAL (fun
->decl
), 0);
6562 default_rtl_profile ();
6564 /* For -dx discard loops now, otherwise IL verify in clean_state will
6566 if (rtl_dump_and_exit
)
6568 cfun
->curr_properties
&= ~PROP_loops
;
6569 loop_optimizer_finalize ();
6572 timevar_pop (TV_POST_EXPAND
);
6580 make_pass_expand (gcc::context
*ctxt
)
6582 return new pass_expand (ctxt
);