/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "regs.h" /* For reg_renumber.  */
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfgcleanup.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "tree-ssa-address.h"
#include "tree-chkp.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#define NAME__MAIN "__main"
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;
static rtx expand_debug_expr (tree);
static bool defer_stack_allocation (tree, bool);
static void record_alignment_for_reg_var (unsigned int);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce ambiguity
   out of the same user variable being in multiple partitions (this is
   less likely for compiler-introduced temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}
/* Associate declaration T with storage space X.  If T is not an SSA name
   this is exactly SET_DECL_RTL, otherwise make the partition of T
   associated with X.  */

static void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
		       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
		       || (use_register_for_decl (t)
			   ? (REG_P (x)
			      || (GET_CODE (x) == CONCAT
				  && (REG_P (XEXP (x, 0))
				      || SUBREG_P (XEXP (x, 0)))
				  && (REG_P (XEXP (x, 1))
				      || SUBREG_P (XEXP (x, 1))))
			      /* We need to accept PARALLELs for RESULT_DECLs
				 because of vector types with BLKmode returned
				 in multiple registers, but they are supposed
				 to be uncoalesced.  */
			      || (GET_CODE (x) == PARALLEL
				  && SSAVAR (t)
				  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
				  && (GET_MODE (x) == BLKmode
				      || !flag_tree_coalesce_vars)))
			   : (MEM_P (x) || x == pc_rtx
			      || (GET_CODE (x) == CONCAT
				  && MEM_P (XEXP (x, 0))
				  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
		       || (SSAVAR (t)
			   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
			   && (promote_ssa_mode (t, NULL) == BLKmode
			       || !flag_tree_coalesce_vars))
		       || !use_register_for_decl (t)
		       || GET_MODE (x) == promote_ssa_mode (t, NULL));
      tree cur = NULL_TREE;
      else if (SUBREG_P (xm))
	{
	  gcc_assert (subreg_lowpart_p (xm));
	  xm = SUBREG_REG (xm);
	}
      else if (GET_CODE (xm) == CONCAT)
      else if (GET_CODE (xm) == PARALLEL)
	{
	  xm = XVECEXP (xm, 0, 0);
	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
	}
      else if (xm == pc_rtx)

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

	    set_mem_attributes (x,
				next && TREE_CODE (next) == SSA_NAME
	    set_reg_attrs_for_decl_rtl (next, x);
  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
	{
	  if (SA.partition_to_pseudo[part])
	    gcc_assert (SA.partition_to_pseudo[part] == x);
	  else if (x != pc_rtx)
	    SA.partition_to_pseudo[part] = x;
	}
      /* For the benefit of debug information at -O0 (where
	 vartracking doesn't run) record the place also in the base
	 DECL.  For PARMs and RESULTs, do so only when setting the
	 default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
	  && (VAR_P (SSA_NAME_VAR (t))
	      || SSA_NAME_IS_DEFAULT_DEF (t)))
	{
	  tree var = SSA_NAME_VAR (t);
	  /* If we don't yet have something recorded, just record it now.  */
	  if (!DECL_RTL_SET_P (var))
	    SET_DECL_RTL (var, x);
	  /* If we have it set already to "multiple places" don't
	     change this.  */
	  else if (DECL_RTL (var) == pc_rtx)
	    ;
	  /* If we have something recorded and it's not the same place
	     as we want to record now, we have multiple partitions for the
	     same base variable, with different places.  We can't just
	     randomly choose one, hence we have to say that we don't know.
	     This only happens with optimization, and there var-tracking
	     will figure out the right thing.  */
	  else if (DECL_RTL (var) != x)
	    SET_DECL_RTL (var, pc_rtx);
	}
    }
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */

  /* The numbers of conflicting stack variables.  */

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}
/* Align given offset BASE with ALIGN.  Truncate up if ALIGN_UP is true,
   down otherwise.  Return truncated BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}
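
/* For example, with ALIGN == 8 (a power of two, so -ALIGN is a mask of the
   low bits): align_base (13, 8, true) rounds up to 16, while
   align_base (13, 8, false) truncates down to 8.  */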
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
	= align_base (frame_offset - frame_phase - size,
		      align, false) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
	= align_base (frame_offset - frame_phase, align, true) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
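
/* For instance, on a downward-growing frame with frame_phase == 0, a request
   for 12 bytes at 8-byte alignment starting from frame_offset == -16 moves
   frame_offset to -32 and returns -32; on an upward-growing frame the same
   request from offset 16 returns 16 and advances frame_offset to 28.  */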
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_uhwi (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  v->alignb = align_local_variable (decl);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;

  /* All variables initially conflict with no other.  */

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
	bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
	{
	  size_t num = *v;
	  bitmap_iterator bi;
	  unsigned i;
	  gcc_assert (num < stack_vars_num);
	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
	    add_stack_var_conflict (num, i);
	}
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  size_t *v;
	  /* Nested function lowering might introduce LHSs
	     that are COMPONENT_REFs.  */
	  if (TREE_CODE (lhs) != VAR_DECL)
	    continue;
	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
	      && (v = decl_to_stack_part->get (lhs)))
	    bitmap_clear_bit (work, *v);
	}
      else if (!is_gimple_debug (stmt))
	{
	  if (for_conflict
	      && visit == visit_op)
	    {
	      /* If this is the first real instruction in this BB we need
		 to add conflicts for everything live at this point now.
		 Unlike classical liveness for named objects we can't
		 rely on seeing a def/use of the names we're interested in.
		 There might merely be indirect loads/stores.  We'd not add any
		 conflicts for such partitions.  */
	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
		{
		  struct stack_var *a = &stack_vars[i];
		  if (!a->conflicts)
		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
		  bitmap_ior_into (a->conflicts, work);
		}
	      visit = visit_conflict;
	    }
	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
	}
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
	{
	  bitmap active;
	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
	  active = (bitmap)bb->aux;
	  add_scope_conflicts_1 (bb, work, false);
	  if (bitmap_ior_into (active, work))
	    changed = true;
	}
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  BITMAP_FREE (work);
  XDELETEVEC (rpo);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
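
/* As an illustration of the approximation above: in a body like
     { int a[8]; use (a); }  { int b[8]; use (b); }
   the gimplifier emits an end-of-scope clobber for A, so A is dead before
   B's first mention, the two arrays get no conflict edge, and they may end
   up sharing one stack slot.  */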
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return 1;
  if (aligna > alignb)
    return -1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
	return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
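
/* Sorting example: a 64-byte object with 32-byte alignment sorts before a
   64-byte object with 16-byte alignment, which in turn sorts before an
   8-byte scalar, because size is compared before (true) alignment and both
   keys are ordered decreasing.  */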
struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};

typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
/* If the points-to solution *PI points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
			       part_hashmap *decls_to_partitions,
			       hash_set<bitmap> *visited, bitmap temp)
{
      /* The pointed-to vars bitmap is shared, it is enough to
	 visit it once.  */
      || visited->add (pt->vars))
    return;

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
	|| !bitmap_bit_p (temp, i))
	&& (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
	  || stack_vars[i].next == EOC)
	continue;

      if (!decls_to_partitions)
	{
	  decls_to_partitions = new part_hashmap;
	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
	}

      /* Create an SSA_NAME that points to the partition for use
	 as base during alias-oracle queries on RTL for bases that
	 have been partitioned.  */
      if (var == NULL_TREE)
	var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
	 points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  tree decl = stack_vars[j].decl;
	  unsigned int uid = DECL_PT_UID (decl);
	  bitmap_set_bit (part, uid);
	  decls_to_partitions->put (uid, part);
	  cfun->gimple_df->decls_to_pointers->put (decl, name);
	  if (TREE_ADDRESSABLE (decl))
	    TREE_ADDRESSABLE (name) = 1;
	}

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  struct ptr_info_def *pi;

	  if (name
	      && POINTER_TYPE_P (TREE_TYPE (name))
	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
					   &visited, temp);
	}

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
				     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
	add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
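
/* E.g. union_stack_vars (a, b) with partitions A = {a} and B = {b} leaves
   stack_vars[a] as the representative with next chain a -> b -> EOC, and
   folds B's conflict edges into A so that later merges still see them.  */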
/* Return true if the current function should have its stack frame
   protected by address sanitizer.  */

static inline bool
asan_sanitize_stack_p (void)
{
  return ((flag_sanitize & SANITIZE_ADDRESS)
	  && !lookup_attribute ("no_sanitize_address",
				DECL_ATTRIBUTES (current_function_decl)));
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size in descending order.
	For each object A {
	  Look for the largest non-conflicting object B with size <= S.
	  UNION (A, B)
	}
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
	 see a var that is not a partition representative, it must
	 have been merged earlier.  */
      if (stack_vars[i].representative != i)
	continue;

      for (sj = si + 1; sj < n; ++sj)
	{
	  size_t j = stack_vars_sorted[sj];
	  unsigned int jalign = stack_vars[j].alignb;
	  HOST_WIDE_INT jsize = stack_vars[j].size;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Do not mix objects of "small" (supported) alignment
	     and "large" (unsupported) alignment.  */
	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
	    continue;

	  /* For Address Sanitizer do not mix objects with different
	     sizes, as the shorter vars wouldn't be adequately protected.
	     Don't do that for "large" (unsupported) alignment objects,
	     those aren't protected anyway.  */
	  if (asan_sanitize_stack_p () && isize != jsize
	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    break;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* UNION the objects, placing J at OFFSET.  */
	  union_stack_vars (i, j);
	}
    }

  update_alias_info_with_stack_vars ();
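
/* A sketch of the effect: given non-conflicting objects of sizes 32, 16
   and 8, the sorted walk merges the 16- and 8-byte objects into the
   32-byte representative's partition, so all three are later laid out in
   one shared region instead of three separate slots.  */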
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
	       " align %u\n", (unsigned long) i, stack_vars[i].size,
	       stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	}
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
			 HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
		   ? TYPE_MODE (TREE_TYPE (decl))
		   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
	 If it is we generate stack slots only accidentally so it isn't as
	 important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
	offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
	align = base_align;

      /* One would think that we could assert that we're not decreasing
	 alignment here, but (at least) the i386 port does exactly this
	 via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
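
/* The (offset & -offset) computation above isolates the lowest set bit of
   the byte offset: e.g. an offset of 24 gives 8, so a variable placed 24
   bytes from an aligned base can only be assumed to be 8-byte aligned.  */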
struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed order: highest offset pairs come first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
	{
	  unsigned alignb;

	  i = stack_vars_sorted[si];
	  alignb = stack_vars[i].alignb;

	  /* All "large" alignment decls come before all "small" alignment
	     decls, but "large" alignment decls are not sorted based on
	     their alignment.  Increase large_align to track the largest
	     required alignment.  */
	  if ((alignb * BITS_PER_UNIT) > large_align)
	    large_align = alignb * BITS_PER_UNIT;

	  /* Stop when we get to the first decl with "small" alignment.  */
	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    break;

	  /* Skip variables that aren't partition representatives.  */
	  if (stack_vars[i].representative != i)
	    continue;

	  /* Skip variables that have already had rtl assigned.  See also
	     add_stack_var where we perpetrate this pc_rtx hack.  */
	  decl = stack_vars[i].decl;
	  if (TREE_CODE (decl) == SSA_NAME
	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
	      : DECL_RTL (decl) != pc_rtx)
	    continue;

	  large_size += alignb - 1;
	  large_size &= -(HOST_WIDE_INT)alignb;
	  large_size += stack_vars[i].size;
	}

      /* If there were any, allocate space.  */
      if (large_size > 0)
	large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
						   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
	  : DECL_RTL (decl) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (i))
	continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	{
	  base = virtual_stack_vars_rtx;
	  if (asan_sanitize_stack_p () && pred)
	    {
	      HOST_WIDE_INT prev_offset
		= align_base (frame_offset,
			      MAX (alignb, ASAN_RED_ZONE_SIZE),
			      !FRAME_GROWS_DOWNWARD);
	      tree repr_decl = NULL_TREE;
	      offset
		= alloc_stack_frame_space (stack_vars[i].size
					   + ASAN_RED_ZONE_SIZE,
					   MAX (alignb, ASAN_RED_ZONE_SIZE));

	      data->asan_vec.safe_push (prev_offset);
	      data->asan_vec.safe_push (offset + stack_vars[i].size);
	      /* Find best representative of the partition.
		 Prefer those with DECL_NAME, even better
		 satisfying asan_protect_stack_decl predicate.  */
	      for (j = i; j != EOC; j = stack_vars[j].next)
		if (asan_protect_stack_decl (stack_vars[j].decl)
		    && DECL_NAME (stack_vars[j].decl))
		  {
		    repr_decl = stack_vars[j].decl;
		    break;
		  }
		else if (repr_decl == NULL_TREE
			 && DECL_P (stack_vars[j].decl)
			 && DECL_NAME (stack_vars[j].decl))
		  repr_decl = stack_vars[j].decl;
	      if (repr_decl == NULL_TREE)
		repr_decl = stack_vars[i].decl;
	      data->asan_decl_vec.safe_push (repr_decl);
	      data->asan_alignb = MAX (data->asan_alignb, alignb);
	      if (data->asan_base == NULL)
		data->asan_base = gen_reg_rtx (Pmode);
	      base = data->asan_base;

	      if (!STRICT_ALIGNMENT)
		base_align = crtl->max_used_stack_slot_alignment;
	      else
		base_align = MAX (crtl->max_used_stack_slot_alignment,
				  GET_MODE_ALIGNMENT (SImode)
				  << ASAN_SHADOW_SHIFT);
	    }
	  else
	    {
	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
	      base_align = crtl->max_used_stack_slot_alignment;
	    }
	}
      else
	{
	  /* Large alignment is only processed in the last pass.  */
	  if (pred)
	    continue;
	  gcc_assert (large_base != NULL);

	  large_alloc += alignb - 1;
	  large_alloc &= -(HOST_WIDE_INT)alignb;
	  offset = large_alloc;
	  large_alloc += stack_vars[i].size;

	  base = large_base;
	  base_align = large_align;
	}

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	expand_one_stack_var_at (stack_vars[j].decl,
				 base, base_align, offset);
    }

  gcc_assert (large_alloc == large_size);
}
1214 static HOST_WIDE_INT
1215 account_stack_vars (void)
1217 size_t si
, j
, i
, n
= stack_vars_num
;
1218 HOST_WIDE_INT size
= 0;
1220 for (si
= 0; si
< n
; ++si
)
1222 i
= stack_vars_sorted
[si
];
1224 /* Skip variables that aren't partition representatives, for now. */
1225 if (stack_vars
[i
].representative
!= i
)
1228 size
+= stack_vars
[i
].size
;
1229 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1230 set_rtl (stack_vars
[j
].decl
, NULL
);
/* Record the RTL assignment X for the default def of PARM.  */

void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
	      || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
					      TYPE_MODE (TREE_TYPE (parm)),
					      TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
	 allocate it, which means that in-frame portion is just a
	 pointer.  ??? We've got a pseudo for sure here, do we
	 actually dynamically allocate its spilling area if needed?
	 ??? Isn't it a problem when POINTER_SIZE also exceeds
	 MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32?  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
	align = POINTER_SIZE;

      record_alignment_for_reg_var (align);
    }

  tree ssa = ssa_default_def (cfun, parm);
  if (!ssa)
    return set_rtl (parm, x);

  int part = var_to_partition (SA.map, ssa);
  gcc_assert (part != NO_PARTITION);

  bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
  gcc_assert (changed);

  set_rtl (ssa, x);
  gcc_assert (DECL_RTL (parm) == x);
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_uhwi (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
			   crtl->max_used_stack_slot_alignment, offset);
}
/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
	{
	  rtx x = SA.partition_to_pseudo[part];
	  gcc_assert (MEM_P (x));
	  return;
	}
    }

  return expand_one_stack_var_1 (var);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* Record the alignment requirements of some variable assigned to a
   pseudo register.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
	 realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}
/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
					  TYPE_MODE (TREE_TYPE (var)),
					  TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = POINTER_SIZE;

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
	add_stack_var (var);
      else
	expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);
}
/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
	{
	  rtx x = SA.partition_to_pseudo[part];
	  gcc_assert (REG_P (x));
	  return;
	}
    }

  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  tree size_unit = TREE_CODE (var) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (var))
    : DECL_SIZE_UNIT (var);

  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (size_unit)
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || asan_sanitize_stack_p ())
    return true;

  unsigned int align = TREE_CODE (var) == SSA_NAME
    ? TYPE_ALIGN (TREE_TYPE (var))
    : DECL_ALIGN (var);

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  bool ignored = TREE_CODE (var) == SSA_NAME
    ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
    : DECL_IGNORED_P (var);

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && ignored && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
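
/* For instance, at -O0 a 16-byte local (below the stack-sharing size
   threshold) is allocated immediately, while at -O2 even toplevel locals
   are deferred so the sorting in partition_stack_vars can pack them more
   tightly.  */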
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      if (is_global_var (var))
	return 0;

      /* Because we don't know if VAR will be in register or on stack,
	 we conservatively assume it will be on stack even if VAR is
	 eventually put into register after RA pass.  For non-automatic
	 variables, which won't be on stack, we collect alignment of
	 type and ignore user specified alignment.  Similarly for
	 SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
	  || DECL_EXTERNAL (var)
	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
				   TYPE_MODE (TREE_TYPE (var)),
				   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
	   or variables which were assigned a stack slot already by
	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
	   changed from the offset chosen to it.  */
	align = crtl->stack_alignment_estimated;
      else
	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
	 it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
	align = POINTER_SIZE;
    }

  record_alignment_for_reg_var (align);

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
		  || (!DECL_EXTERNAL (var)
		      && !DECL_HAS_VALUE_EXPR_P (var)
		      && !TREE_STATIC (var)
		      && TREE_TYPE (var) != error_mark_node
		      && !DECL_HARD_REGISTER (var)
		      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
	expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
	{
	  expand_one_hard_reg_var (var);
	  if (!DECL_HARD_REGISTER (var))
	    /* Invalid register specification.  */
	    expand_one_error_var (var);
	}
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
	expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
	{
	  error ("size of variable %q+D is too large", var);
	  expand_one_error_var (var);
	}
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
	{
	  if (lookup_attribute ("naked",
				DECL_ATTRIBUTES (current_function_decl)))
	    error ("cannot allocate stack for variable %q+D, naked function.",
		   var);

	  expand_one_stack_var (origvar);
	}
      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    if (TREE_USED (t)
	&& ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
	    || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
	|| !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3,
  SPCT_FLAG_EXPLICIT = 4
};
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8
static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
	    len = max;
	  else
	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG
      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
	  && lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl))))
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;
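
/* E.g. with -fstack-protector-strong, "char buf[64];" classifies as a char
   array and lands in phase 1, "int v[4];" in phase 2, and a plain scalar in
   phase 0 (unprotected placement).  */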
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}
/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
	return true;
      i = stack_vars[i].next;
    }
  return false;
}
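
/* The walk follows the partition chain built by union_stack_vars, so a
   partition is treated as needing ASan redzones if any one of its members
   does.  */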
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			   VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
	stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}
/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
	tree field_type = TREE_TYPE (f);
	if (RECORD_OR_UNION_TYPE_P (field_type)
	    && record_or_union_type_has_array_p (field_type))
	  return 1;
	if (TREE_CODE (field_type) == ARRAY_TYPE)
	  return 1;
      }
  return 0;
}
/* Check if the current function has local referenced variables that
   have their addresses taken, contain an array, or are arrays.  */

static bool
stack_protect_decl_p ()
{
  unsigned i;
  tree var;

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    if (!is_global_var (var))
      {
	tree var_type = TREE_TYPE (var);
	if (TREE_CODE (var) == VAR_DECL
	    && (TREE_CODE (var_type) == ARRAY_TYPE
		|| TREE_ADDRESSABLE (var)
		|| (RECORD_OR_UNION_TYPE_P (var_type)
		    && record_or_union_type_has_array_p (var_type))))
	  return true;
      }
  return false;
}
/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	 !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple *stmt = gsi_stmt (gsi);
	/* This assumes that calls to internal-only functions never
	   use a return slot.  */
	if (is_gimple_call (stmt)
	    && !gimple_call_internal_p (stmt)
	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
				  gimple_call_fndecl (stmt)))
	  return true;
      }
  return false;
}
/* Expand all variables used in the function.  */

static rtx_insn *
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  vec<tree> maybe_local_decls = vNULL;
  rtx_insn *var_end_seq = NULL;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  if (targetm.use_pseudo_pic_reg ())
    pic_offset_table_rtx = gen_reg_rtx (Pmode);

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
	continue;

      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      expand_one_ssa_partition (var);
    }

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    gen_stack_protect_signal
      = stack_protect_decl_p () || stack_protect_return_slot_p ();
  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
	{
	  TREE_USED (var) = 0;
	  goto next;
	}
      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	expand_now = true;

      /* Expand variables not associated with any block now.  Those created by
	 the optimizers could be live anywhere in the function.  Those that
	 could possibly have been scoped originally and detached from their
	 block will have their allocation deferred so we coalesce them with
	 others when optimization is enabled.  */
      else if (TREE_USED (var))
	expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
	expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
	{
	  rtx rtl = DECL_RTL_IF_SET (var);

	  /* Keep artificial non-ignored vars in cfun->local_decls
	     chain until instantiate_decls.  */
	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
	    add_local_decl (cfun, var);
	  else if (rtl == NULL_RTX)
	    /* If rtl isn't set yet, which can happen e.g. with
	       -fstack-protector, retry before returning from this
	       function.  */
	    maybe_local_decls.safe_push (var);
	}
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);
  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect != 0
	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
		  && lookup_attribute ("stack_protect",
				       DECL_ATTRIBUTES (current_function_decl)))))
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
	  || cfun->calls_alloca || has_protected_decls
	  || lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls
	  || lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;

    case SPCT_FLAG_EXPLICIT:
      if (lookup_attribute ("stack_protect",
			    DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;

    default:
      break;
    }
2177 /* Assign rtl to each variable based on these partitions. */
2178 if (stack_vars_num
> 0)
2180 struct stack_vars_data data
;
2182 data
.asan_vec
= vNULL
;
2183 data
.asan_decl_vec
= vNULL
;
2184 data
.asan_base
= NULL_RTX
;
2185 data
.asan_alignb
= 0;
2187 /* Reorder decls to be protected by iterating over the variables
2188 array multiple times, and allocating out of each phase in turn. */
2189 /* ??? We could probably integrate this into the qsort we did
2190 earlier, such that we naturally see these variables first,
2191 and thus naturally allocate things in the right order. */
2192 if (has_protected_decls
)
2194 /* Phase 1 contains only character arrays. */
2195 expand_stack_vars (stack_protect_decl_phase_1
, &data
);
2197 /* Phase 2 contains other kinds of arrays. */
2198 if (flag_stack_protect
== SPCT_FLAG_ALL
2199 || flag_stack_protect
== SPCT_FLAG_STRONG
2200 || (flag_stack_protect
== SPCT_FLAG_EXPLICIT
2201 && lookup_attribute ("stack_protect",
2202 DECL_ATTRIBUTES (current_function_decl
))))
2203 expand_stack_vars (stack_protect_decl_phase_2
, &data
);
2206 if (asan_sanitize_stack_p ())
2207 /* Phase 3, any partitions that need asan protection
2208 in addition to phase 1 and 2. */
2209 expand_stack_vars (asan_decl_phase_3
, &data
);
2211 if (!data
.asan_vec
.is_empty ())
2213 HOST_WIDE_INT prev_offset
= frame_offset
;
2214 HOST_WIDE_INT offset
, sz
, redzonesz
;
2215 redzonesz
= ASAN_RED_ZONE_SIZE
;
2216 sz
= data
.asan_vec
[0] - prev_offset
;
2217 if (data
.asan_alignb
> ASAN_RED_ZONE_SIZE
2218 && data
.asan_alignb
<= 4096
2219 && sz
+ ASAN_RED_ZONE_SIZE
>= (int) data
.asan_alignb
)
2220 redzonesz
= ((sz
+ ASAN_RED_ZONE_SIZE
+ data
.asan_alignb
- 1)
2221 & ~(data
.asan_alignb
- HOST_WIDE_INT_1
)) - sz
;
2223 = alloc_stack_frame_space (redzonesz
, ASAN_RED_ZONE_SIZE
);
2224 data
.asan_vec
.safe_push (prev_offset
);
2225 data
.asan_vec
.safe_push (offset
);
2226 /* Leave space for alignment if STRICT_ALIGNMENT. */
2227 if (STRICT_ALIGNMENT
)
2228 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode
)
2229 << ASAN_SHADOW_SHIFT
)
2230 / BITS_PER_UNIT
, 1);
2233 = asan_emit_stack_protection (virtual_stack_vars_rtx
,
2236 data
.asan_vec
.address (),
2237 data
.asan_decl_vec
.address (),
2238 data
.asan_vec
.length ());
2241 expand_stack_vars (NULL
, &data
);
2243 data
.asan_vec
.release ();
2244 data
.asan_decl_vec
.release ();
2247 fini_vars_expansion ();
2249 /* If there were any artificial non-ignored vars without rtl
2250 found earlier, see if deferred stack allocation hasn't assigned
2252 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls
, i
, var
)
2254 rtx rtl
= DECL_RTL_IF_SET (var
);
2256 /* Keep artificial non-ignored vars in cfun->local_decls
2257 chain until instantiate_decls. */
2258 if (rtl
&& (MEM_P (rtl
) || GET_CODE (rtl
) == CONCAT
))
2259 add_local_decl (cfun
, var
);
2261 maybe_local_decls
.release ();
2263 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2264 if (STACK_ALIGNMENT_NEEDED
)
2266 HOST_WIDE_INT align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
2267 if (!FRAME_GROWS_DOWNWARD
)
2268 frame_offset
+= align
- 1;
2269 frame_offset
&= -align
;
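
/* Illustrative note (not part of the original source): for a function such
   as

     void f (void) { char buf[64]; int n; g (buf, &n); }

   BUF would typically be laid out in phase 1 above (character arrays, the
   objects most likely to be overrun) and N in phase 2, so that the guard
   value ends up between the character array and the other locals.  The
   exact placement depends on the target and the -fstack-protector mode.  */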
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
			 TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx_code_label *
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
  if (elt)
    return *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *lab_stmt;

      lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!lab_stmt)
	break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
	break;

      return jump_target_rtx (lab);
    }

  rtx_code_label *l = gen_label_rtx ();
  lab_rtx_for_bb->put (bb, l);
  return l;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx_insn *last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx_insn *insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
	 insert on the remaining edge we potentially will insert
	 it at the end of this block (if the dest block isn't feasible)
	 in order to avoid splitting the edge.  This insertion will take
	 place in front of the last jump.  But we might have emitted
	 multiple jumps (conditional and one unconditional) to the
	 same destination.  Inserting in front of the last one then
	 is a problem.  See PR 40021.  We fix this by deleting all
	 jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
	 screwed.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
	{
	  insn = PREV_INSN (insn);
	  if (JUMP_P (NEXT_INSN (insn)))
	    {
	      if (!any_condjump_p (NEXT_INSN (insn)))
		{
		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
		}
	      delete_insn (NEXT_INSN (insn));
	    }
	}
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gcond *stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx_insn *last2, *last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
	 ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (SA.values
      && TREE_CODE (op0) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
      && TREE_CODE (op1) == INTEGER_CST
      && ((gimple_cond_code (stmt) == NE_EXPR
	   && integer_zerop (op1))
	  || (gimple_cond_code (stmt) == EQ_EXPR
	      && integer_onep (op1)))
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple *second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
	{
	  enum tree_code code2 = gimple_assign_rhs_code (second);
	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
	    {
	      code = code2;
	      op0 = gimple_assign_rhs1 (second);
	      op1 = gimple_assign_rhs2 (second);
	    }
	  /* If jumps are cheap and the target does not support conditional
	     compare, turn some more codes into jumpy sequences.  */
	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
		   && targetm.gen_ccmp_first == NULL)
	    {
	      if ((code2 == BIT_AND_EXPR
		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
		  || code2 == TRUTH_AND_EXPR)
		{
		  code = TRUTH_ANDIF_EXPR;
		  op0 = gimple_assign_rhs1 (second);
		  op1 = gimple_assign_rhs2 (second);
		}
	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
		{
		  code = TRUTH_ORIF_EXPR;
		  op0 = gimple_assign_rhs1 (second);
		  op1 = gimple_assign_rhs2 (second);
		}
	    }
	}
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  set_curr_insn_location (gimple_location (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
		true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus != UNKNOWN_LOCATION)
	set_curr_insn_location (true_edge->goto_locus);
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
		   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus != UNKNOWN_LOCATION)
	set_curr_insn_location (false_edge->goto_locus);
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
	    true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus != UNKNOWN_LOCATION)
    set_curr_insn_location (false_edge->goto_locus);
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  add_bb_to_loop (new_bb, bb->loop_father);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus != UNKNOWN_LOCATION)
    {
      set_curr_insn_location (true_edge->goto_locus);
      true_edge->goto_locus = curr_insn_location ();
    }

  return new_bb;
}
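
/* Illustrative example (assumption, not in the original source): for GIMPLE
   of the form

     _1 = a_2(D) < b_3(D);
     if (_1 != 0) goto <L1>; else goto <L2>;

   the special case above emits a single compare-and-branch on A < B instead
   of first materializing _1, provided TER recorded that the definition of
   _1 could have been forwarded into its single use.  */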
/* Mark all calls that can have a transaction restart.  */

static void
mark_transaction_restart_calls (gimple *stmt)
{
  struct tm_restart_node dummy;
  tm_restart_node **slot;

  if (!cfun->gimple_df->tm_restart)
    return;

  dummy.stmt = stmt;
  slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
  if (slot)
    {
      struct tm_restart_node *n = *slot;
      tree list = n->label_or_list;
      rtx_insn *insn;

      for (insn = next_real_insn (get_last_insn ());
	   !CALL_P (insn);
	   insn = next_real_insn (insn))
	continue;

      if (TREE_CODE (list) == LABEL_DECL)
	add_reg_note (insn, REG_TM, label_rtx (list));
      else
	for (; list ; list = TREE_CHAIN (list))
	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    }
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gcall *stmt)
{
  tree exp, decl, lhs;
  bool builtin_p;
  size_t i;

  if (gimple_call_internal_p (stmt))
    {
      expand_internal_call (stmt);
      return;
    }

  /* If this is a call to a built-in function and it has no effect other
     than setting the lhs, try to implement it using an internal function
     instead.  */
  decl = gimple_call_fndecl (stmt);
  if (gimple_call_lhs (stmt)
      && !gimple_has_side_effects (stmt)
      && (optimize || (decl && called_as_built_in (decl))))
    {
      internal_fn ifn = replacement_internal_fn (stmt);
      if (ifn != IFN_LAST)
	{
	  expand_internal_call (ifn, stmt);
	  return;
	}
    }

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  /* If this is not a builtin function, the function type through which the
     call is made may be different from the type of the function.  */
  if (!builtin_p)
    CALL_EXPR_FN (exp)
      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
		      CALL_EXPR_FN (exp));

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple *def;
      /* TER addresses into arguments of builtin functions so we have a
	 chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
	  && TREE_CODE (arg) == SSA_NAME
	  && (def = get_gimple_for_ssa_name (arg))
	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
	arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  if (decl
      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
  else
    CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);

  /* Ensure RTL is created for debug args.  */
  if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
    {
      vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
      unsigned int ix;
      tree dtemp;

      if (debug_args)
	for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
	  {
	    gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
	    expand_debug_expr (dtemp);
	  }
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

  mark_transaction_restart_calls (stmt);
}
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
				ggc_strdup (TREE_STRING_POINTER (string)),
				locus);

  MEM_VOLATILE_P (body) = vol;

  /* Non-empty basic ASM implicitly clobbers memory.  */
  if (TREE_STRING_LENGTH (string) != 0)
    {
      rtx asm_op, clob;
      unsigned i, nclobbers;
      auto_vec<rtx> input_rvec, output_rvec;
      auto_vec<const char *> constraints;
      auto_vec<rtx> clobber_rvec;
      HARD_REG_SET clobbered_regs;
      CLEAR_HARD_REG_SET (clobbered_regs);

      clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
      clobber_rvec.safe_push (clob);

      if (targetm.md_asm_adjust)
	targetm.md_asm_adjust (output_rvec, input_rvec,
			       constraints, clobber_rvec,
			       clobbered_regs);

      asm_op = body;
      nclobbers = clobber_rvec.length ();
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));

      XVECEXP (body, 0, 0) = asm_op;
      for (i = 0; i < nclobbers; i++)
	XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
    }

  emit_insn (body);
}
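
/* Rough sketch of the result (illustration only, not from the original
   source): for a non-empty basic asm such as asm ("nop"), the emitted
   pattern has roughly the shape

     (parallel [(asm_input "nop")
		(clobber (mem:BLK (scratch)))
		...any clobbers the target's md_asm_adjust hook added...])

   while an empty template is emitted as a bare ASM_INPUT with no implicit
   clobbers.  */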
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (const vec<const char *> &constraints)
{
  unsigned len = constraints.length();
  if (len > 0)
    {
      int nalternatives = n_occurrences (',', constraints[0]);

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in %<asm%>");
	  return false;
	}

      for (unsigned i = 1; i < len; ++i)
	if (n_occurrences (',', constraints[i]) != nalternatives)
	  {
	    error ("operand constraints for %<asm%> differ "
		   "in number of alternatives");
	    return false;
	  }
    }
  return true;
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit error and return the register
   variable definition for error, NULL_TREE for ok.  */

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qE conflicts with asm clobber list",
	     DECL_NAME (overlap));

      /* Reset registerness to stop multiple errors emitted for a single
	 variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}
/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
   should be the fallthru basic block of the asm goto.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

static void
expand_asm_stmt (gasm *stmt)
{
  class save_input_location
  {
    location_t old;

  public:
    explicit save_input_location(location_t where)
    {
      old = input_location;
      input_location = where;
    }

    ~save_input_location()
    {
      input_location = old;
    }
  };

  location_t locus = gimple_location (stmt);

  if (gimple_asm_input_p (stmt))
    {
      const char *s = gimple_asm_string (stmt);
      tree string = build_string (strlen (s), s);
      expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
      return;
    }

  /* There are some legacy diagnostics in here, and also avoids a sixth
     parameter to targetm.md_asm_adjust.  */
  save_input_location s_i_l(locus);

  unsigned noutputs = gimple_asm_noutputs (stmt);
  unsigned ninputs = gimple_asm_ninputs (stmt);
  unsigned nlabels = gimple_asm_nlabels (stmt);
  unsigned i;

  /* ??? Diagnose during gimplification?  */
  if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
      return;
    }

  auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
  auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
  auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
  /* Copy the gimple vectors into new vectors that we can manipulate.  */

  output_tvec.safe_grow (noutputs);
  input_tvec.safe_grow (ninputs);
  constraints.safe_grow (noutputs + ninputs);

  for (i = 0; i < noutputs; ++i)
    {
      tree t = gimple_asm_output_op (stmt, i);
      output_tvec[i] = TREE_VALUE (t);
      constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
    }
  for (i = 0; i < ninputs; i++)
    {
      tree t = gimple_asm_input_op (stmt, i);
      input_tvec[i] = TREE_VALUE (t);
      constraints[i + noutputs]
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
    }

  /* ??? Diagnose during gimplification?  */
  if (! check_operand_nalternatives (constraints))
    return;
  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  auto_vec<rtx> clobber_rvec;
  HARD_REG_SET clobbered_regs;
  CLEAR_HARD_REG_SET (clobbered_regs);

  if (unsigned n = gimple_asm_nclobbers (stmt))
    {
      clobber_rvec.reserve (n);
      for (i = 0; i < n; i++)
	{
	  tree t = gimple_asm_clobber_op (stmt, i);
	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
	  int nregs, j;

	  j = decode_reg_name_and_count (regname, &nregs);
	  if (j < 0)
	    {
	      if (j == -2)
		{
		  /* ??? Diagnose during gimplification?  */
		  error ("unknown register name %qs in %<asm%>", regname);
		}
	      else if (j == -4)
		{
		  rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
		  clobber_rvec.safe_push (x);
		}
	      else
		{
		  /* Otherwise we should have -1 == empty string
		     or -3 == cc, which is not a register.  */
		  gcc_assert (j == -1 || j == -3);
		}
	    }
	  else
	    for (int reg = j; reg < j + nregs; reg++)
	      {
		/* Clobbering the PIC register is an error.  */
		if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
		  {
		    /* ??? Diagnose during gimplification?  */
		    error ("PIC register clobbered by %qs in %<asm%>",
			   regname);
		    return;
		  }

		SET_HARD_REG_BIT (clobbered_regs, reg);
		rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
		clobber_rvec.safe_push (x);
	      }
	}
    }
  unsigned nclobbers = clobber_rvec.length();
  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */
  /* ??? Diagnose during gimplification?  */

  for (i = 0; i < noutputs; ++i)
    {
      tree val = output_tvec[i];
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && REG_P (DECL_RTL (val))
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	mark_addressable (val);
    }

  for (i = 0; i < ninputs; ++i)
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
				    constraints.address (),
				    &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	mark_addressable (input_tvec[i]);
    }
  /* Second pass evaluates arguments.  */

  /* Make sure stack is consistent for asm goto.  */
  if (nlabels > 0)
    do_pending_stack_adjust ();
  int old_generating_concat_p = generating_concat_p;

  /* Vector of RTX's of evaluated output operands.  */
  auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
  auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
  rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;

  output_rvec.safe_grow (noutputs);

  for (i = 0; i < noutputs; ++i)
    {
      tree val = output_tvec[i];
      tree type = TREE_TYPE (val);
      bool is_inout, allows_reg, allows_mem, ok;
      rtx op;

      ok = parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout);
      gcc_assert (ok);

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then we will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || REG_P (DECL_RTL (val)))
	      && ! (REG_P (DECL_RTL (val))
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  op = expand_expr (val, NULL_RTX, VOIDmode,
			    !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
	  if (MEM_P (op))
	    op = validize_mem (op);

	  if (! allows_reg && !MEM_P (op))
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && MEM_P (op))
	      || GET_CODE (op) == CONCAT)
	    {
	      rtx old_op = op;
	      op = gen_reg_rtx (GET_MODE (op));

	      generating_concat_p = old_generating_concat_p;

	      if (is_inout)
		emit_move_insn (op, old_op);

	      push_to_sequence2 (after_rtl_seq, after_rtl_end);
	      emit_move_insn (old_op, op);
	      after_rtl_seq = get_insns ();
	      after_rtl_end = get_last_insn ();
	      end_sequence ();
	    }
	}
      else
	{
	  op = assign_temp (type, 0, 1);
	  op = validize_mem (op);
	  if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);

	  generating_concat_p = old_generating_concat_p;

	  push_to_sequence2 (after_rtl_seq, after_rtl_end);
	  expand_assignment (val, make_tree (type, op), false);
	  after_rtl_seq = get_insns ();
	  after_rtl_end = get_last_insn ();
	  end_sequence ();
	}
      output_rvec[i] = op;

      if (is_inout)
	inout_opnum.safe_push (i);
    }

  auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
  auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;

  input_rvec.safe_grow (ninputs);
  input_mode.safe_grow (ninputs);

  generating_concat_p = 0;

  for (i = 0; i < ninputs; ++i)
    {
      tree val = input_tvec[i];
      tree type = TREE_TYPE (val);
      bool allows_reg, allows_mem, ok;
      const char *constraint;
      rtx op;

      constraint = constraints[i + noutputs];
      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
				   constraints.address (),
				   &allows_mem, &allows_reg);
      gcc_assert (ok);

      /* EXPAND_INITIALIZER will not generate code for valid initializer
	 constants, but will still generate code for other types of operand.
	 This is the behavior we want for constant constraints.  */
      op = expand_expr (val, NULL_RTX, VOIDmode,
			allows_reg ? EXPAND_NORMAL
			: allows_mem ? EXPAND_MEMORY
			: EXPAND_INITIALIZER);

      /* Never pass a CONCAT to an ASM.  */
      if (GET_CODE (op) == CONCAT)
	op = force_reg (GET_MODE (op), op);
      else if (MEM_P (op))
	op = validize_mem (op);

      if (asm_operand_ok (op, constraint, NULL) <= 0)
	{
	  if (allows_reg && TYPE_MODE (type) != BLKmode)
	    op = force_reg (TYPE_MODE (type), op);
	  else if (!allows_mem)
	    warning (0, "asm operand %d probably doesn%'t match constraints",
		     i + noutputs);
	  else if (MEM_P (op))
	    {
	      /* We won't recognize either volatile memory or memory
		 with a queued address as an available memory_operand
		 at this point.  Ignore it: clearly this *is* a memory.  */
	    }
	  else
	    gcc_unreachable ();
	}
      input_rvec[i] = op;
      input_mode[i] = TYPE_MODE (type);
    }
  /* For in-out operands, copy output rtx to input rtx.  */
  unsigned ninout = inout_opnum.length();
  for (i = 0; i < ninout; i++)
    {
      int j = inout_opnum[i];
      rtx o = output_rvec[j];

      input_rvec.safe_push (o);
      input_mode.safe_push (GET_MODE (o));

      char buffer[16];
      sprintf (buffer, "%d", j);
      constraints.safe_push (ggc_strdup (buffer));
    }
  ninputs += ninout;
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  rtx_insn *after_md_seq = NULL;
  if (targetm.md_asm_adjust)
    after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
					  constraints, clobber_rvec,
					  clobbered_regs);

  /* Do not allow the hook to change the output and input count,
     lest it mess up the operand numbering.  */
  gcc_assert (output_rvec.length() == noutputs);
  gcc_assert (input_rvec.length() == ninputs);
  gcc_assert (constraints.length() == noutputs + ninputs);

  /* But it certainly can adjust the clobbers.  */
  nclobbers = clobber_rvec.length();

  /* Third pass checks for easy conflicts.  */
  /* ??? Why are we doing this on trees instead of rtx.  */

  bool clobber_conflict_found = 0;
  for (i = 0; i < noutputs; ++i)
    if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
      clobber_conflict_found = 1;
  for (i = 0; i < ninputs - ninout; ++i)
    if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
      clobber_conflict_found = 1;
  /* Make vectors for the expression-rtx, constraint strings,
     and named operands.  */

  rtvec argvec = rtvec_alloc (ninputs);
  rtvec constraintvec = rtvec_alloc (ninputs);
  rtvec labelvec = rtvec_alloc (nlabels);

  rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
				    : GET_MODE (output_rvec[0])),
				   ggc_strdup (gimple_asm_string (stmt)),
				   empty_string, 0, argvec, constraintvec,
				   labelvec, locus);
  MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);

  for (i = 0; i < ninputs; ++i)
    {
      ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
	= gen_rtx_ASM_INPUT_loc (input_mode[i],
				 constraints[i + noutputs],
				 locus);
    }

  /* Copy labels to the vector.  */
  rtx_code_label *fallthru_label = NULL;
  if (nlabels > 0)
    {
      basic_block fallthru_bb = NULL;
      edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
      if (fallthru)
	fallthru_bb = fallthru->dest;

      for (i = 0; i < nlabels; ++i)
	{
	  tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
	  rtx_insn *r;
	  /* If asm goto has any labels in the fallthru basic block, use
	     a label that we emit immediately after the asm goto.  Expansion
	     may insert further instructions into the same basic block after
	     asm goto and if we don't do this, insertion of instructions on
	     the fallthru edge might misbehave.  See PR58670.  */
	  if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
	    {
	      if (fallthru_label == NULL_RTX)
		fallthru_label = gen_label_rtx ();
	      r = fallthru_label;
	    }
	  else
	    r = label_rtx (label);
	  ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
	}
    }
  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
			       ARGVEC CONSTRAINTS OPNAMES))
     If there is more than one, put them inside a PARALLEL.  */

  if (nlabels > 0 && nclobbers == 0)
    {
      gcc_assert (noutputs == 0);
      emit_jump_insn (body);
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      emit_insn (body);
    }
  else if (noutputs == 1 && nclobbers == 0)
    {
      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
      emit_insn (gen_rtx_SET (output_rvec[0], body));
    }
  else
    {
      rtx obody = body;
      int num = noutputs;

      if (num == 0)
	num = 1;

      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */
      for (i = 0; i < noutputs; ++i)
	{
	  rtx src, o = output_rvec[i];
	  if (i == 0)
	    {
	      ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
	      src = obody;
	    }
	  else
	    {
	      src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
					  ASM_OPERANDS_TEMPLATE (obody),
					  constraints[i], i, argvec,
					  constraintvec, labelvec, locus);
	      MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
	    }
	  XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */
      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */
      for (unsigned j = 0; j < nclobbers; ++j)
	{
	  rtx clobbered_reg = clobber_rvec[j];

	  /* Do sanity check for overlap between clobbers and respectively
	     input and outputs that hasn't been handled.  Such overlap
	     should have been detected and reported above.  */
	  if (!clobber_conflict_found && REG_P (clobbered_reg))
	    {
	      /* We test the old body (obody) contents to avoid
		 tripping over the under-construction body.  */
	      for (unsigned k = 0; k < noutputs; ++k)
		if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
		  internal_error ("asm clobber conflict with output operand");

	      for (unsigned k = 0; k < ninputs - ninout; ++k)
		if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
		  internal_error ("asm clobber conflict with input operand");
	    }

	  XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
	}

      if (nlabels > 0)
	emit_jump_insn (body);
      else
	emit_insn (body);
    }

  generating_concat_p = old_generating_concat_p;

  if (fallthru_label)
    emit_label (fallthru_label);

  if (after_md_seq)
    emit_insn (after_md_seq);
  if (after_rtl_seq)
    emit_insn (after_rtl_seq);

  crtl->has_asm_statement = 1;
}
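
/* Rough sketch (illustration only, not part of the original source): for

     asm volatile ("mov %1, %0" : "=r" (x) : "r" (y) : "cc");

   the code above ends up emitting something of the shape

     (parallel [(set (reg x) (asm_operands "mov %1, %0" "=r" 0 [...]))
		(clobber ...)])

   where the clobber entries come from the gimple clobber list (here "cc",
   mapped to the target's condition-code register) plus anything the
   target's md_asm_adjust hook chose to add.  */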
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

static void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

static void
expand_goto (tree label)
{
  if (flag_checking)
    {
      /* Check for a nonlocal goto to a containing function.  Should have
	 gotten translated to __builtin_nonlocal_goto.  */
      tree context = decl_function_context (label);
      gcc_assert (!context || context == current_function_decl);
    }

  emit_jump (jump_target_rtx (label));
}

/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location unless it's already there.  */

  tree decl = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl);
  if (return_reg != val)
    {
      tree funtype = TREE_TYPE (current_function_decl);
      tree type = TREE_TYPE (decl);
      int unsignedp = TYPE_UNSIGNED (type);
      machine_mode old_mode = DECL_MODE (decl);
      machine_mode mode;
      if (DECL_BY_REFERENCE (decl))
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
      else
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);

      if (mode != old_mode)
	val = convert_modes (mode, old_mode, val, unsignedp);

      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

static void
expand_return (tree retval, tree bounds)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;
  rtx bounds_rtl;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
	    || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Put returned bounds to the right place.  */
  bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
  if (bounds_rtl)
    {
      rtx addr = NULL;
      rtx bnd = NULL;

      if (bounds && bounds != error_mark_node)
	{
	  bnd = expand_normal (bounds);
	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
	}
      else if (REG_P (bounds_rtl))
	{
	  if (bounds)
	    bnd = chkp_expand_zero_bounds ();
	  else
	    {
	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
	      addr = gen_rtx_MEM (Pmode, addr);
	      bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
	    }

	  targetm.calls.store_returned_bounds (bounds_rtl, bnd);
	}
      else
	{
	  int n;

	  gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);

	  if (bounds)
	    bnd = chkp_expand_zero_bounds ();
	  else
	    {
	      addr = expand_normal (build_fold_addr_expr (retval_rhs));
	      addr = gen_rtx_MEM (Pmode, addr);
	    }

	  for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
	    {
	      rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
	      if (!bounds)
		{
		  rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
		  rtx from = adjust_address (addr, Pmode, INTVAL (offs));
		  bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
		}
	      targetm.calls.store_returned_bounds (slot, bnd);
	    }
	}
    }
  else if (chkp_function_instrumented_p (current_function_decl)
	   && !BOUNDED_P (retval_rhs)
	   && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
	   && TREE_CODE (retval_rhs) != RESULT_DECL)
    {
      rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
      addr = gen_rtx_MEM (Pmode, addr);

      gcc_assert (MEM_P (result_rtl));

      chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
    }

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  */

  else if (retval_rhs != 0
	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
	   && REG_P (result_rtl))
    {
      val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
      if (val)
	{
	  /* Use the mode of the result value on the return register.  */
	  PUT_MODE (result_rtl, GET_MODE (val));
	  expand_value_return (val);
	}
      else
	expand_null_return ();
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Compute the return value into a temporary (usually a pseudo reg).  */
      val
	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
      val = force_not_mem (val);
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_value_return (result_rtl);
    }
}
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple *stmt)
{
  tree op0;

  set_curr_insn_location (gimple_location (stmt));

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
	expand_goto (op0);
      else
	expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (as_a <glabel *> (stmt)));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_SWITCH:
      expand_case (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (as_a <gasm *> (stmt));
      break;
    case GIMPLE_CALL:
      expand_call_stmt (as_a <gcall *> (stmt));
      break;

    case GIMPLE_RETURN:
      {
	tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
	op0 = gimple_return_retval (as_a <greturn *> (stmt));

	if (op0 && op0 != error_mark_node)
	  {
	    tree result = DECL_RESULT (current_function_decl);

	    /* Mark we have return statement with missing bounds.  */
	    if (!bnd
		&& chkp_function_instrumented_p (cfun->decl)
		&& !DECL_P (op0))
	      bnd = error_mark_node;

	    /* If we are not returning the current function's RESULT_DECL,
	       build an assignment to it.  */
	    if (op0 != result)
	      {
		/* I believe that a function's RESULT_DECL is unique.  */
		gcc_assert (TREE_CODE (op0) != RESULT_DECL);

		/* ??? We'd like to use simply expand_assignment here,
		   but this fails if the value is of BLKmode but the return
		   decl is a register.  expand_return has special handling
		   for this combination, which eventually should move
		   to common code.  See comments there.  Until then, let's
		   build a modify expression :-/  */
		op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
			      result, op0);
	      }
	  }

	if (!op0)
	  expand_null_return ();
	else
	  expand_return (op0, bnd);
      }
      break;

    case GIMPLE_ASSIGN:
      {
	gassign *assign_stmt = as_a <gassign *> (stmt);
	tree lhs = gimple_assign_lhs (assign_stmt);

	/* Tree expand used to fiddle with |= and &= of two bitfield
	   COMPONENT_REFs here.  This can't happen with gimple, the LHS
	   of binary assigns must be a gimple reg.  */

	if (TREE_CODE (lhs) != SSA_NAME
	    || get_gimple_rhs_class (gimple_expr_code (stmt))
	       == GIMPLE_SINGLE_RHS)
	  {
	    tree rhs = gimple_assign_rhs1 (assign_stmt);
	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
			== GIMPLE_SINGLE_RHS);
	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
		/* Do not put locations on possibly shared trees.  */
		&& !is_gimple_min_invariant (rhs))
	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
	    if (TREE_CLOBBER_P (rhs))
	      /* This is a clobber to mark the going out of scope for
		 this LHS.  */
	      ;
	    else
	      expand_assignment (lhs, rhs,
				 gimple_assign_nontemporal_move_p (
				   assign_stmt));
	  }
	else
	  {
	    rtx target, temp;
	    bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
	    struct separate_ops ops;
	    bool promoted = false;

	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
	      promoted = true;

	    ops.code = gimple_assign_rhs_code (assign_stmt);
	    ops.type = TREE_TYPE (lhs);
	    switch (get_gimple_rhs_class (ops.code))
	      {
		case GIMPLE_TERNARY_RHS:
		  ops.op2 = gimple_assign_rhs3 (assign_stmt);
		  /* Fallthru */
		case GIMPLE_BINARY_RHS:
		  ops.op1 = gimple_assign_rhs2 (assign_stmt);
		  /* Fallthru */
		case GIMPLE_UNARY_RHS:
		  ops.op0 = gimple_assign_rhs1 (assign_stmt);
		  break;
		default:
		  gcc_unreachable ();
	      }
	    ops.location = gimple_location (stmt);

	    /* If we want to use a nontemporal store, force the value to
	       register first.  If we store into a promoted register,
	       don't directly expand to target.  */
	    temp = nontemporal || promoted ? NULL_RTX : target;
	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
				       EXPAND_NORMAL);

	    if (temp == target)
	      ;
	    else if (promoted)
	      {
		int unsignedp = SUBREG_PROMOTED_SIGN (target);
		/* If TEMP is a VOIDmode constant, use convert_modes to make
		   sure that we properly convert it.  */
		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
		  {
		    temp = convert_modes (GET_MODE (target),
					  TYPE_MODE (ops.type),
					  temp, unsignedp);
		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
					  GET_MODE (target), temp, unsignedp);
		  }

		convert_move (SUBREG_REG (target), temp, unsignedp);
	      }
	    else if (nontemporal && emit_storent_insn (target, temp))
	      ;
	    else
	      {
		temp = force_operand (temp, target);
		if (temp != target)
		  emit_move_insn (target, temp);
	      }
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx_insn *
expand_gimple_stmt (gimple *stmt)
{
  location_t saved_location = input_location;
  rtx_insn *last = get_last_insn ();
  int lp_nr;

  gcc_assert (cfun);

  /* We need to save and restore the current source location so that errors
     discovered during expansion are emitted with the right location.  But
     it would be better if the diagnostic routines used the source location
     embedded in the tree nodes rather than globals.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  expand_gimple_stmt_1 (stmt);

  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx_insn *insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && insn_could_throw_p (insn))
	    make_reg_eh_region_note (insn, 0, lp_nr);
	}
    }

  return last;
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
{
  rtx_insn *last2, *last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = expand_gimple_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	{
	  if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      e->dest->count -= e->count;
	      e->dest->frequency -= EDGE_FREQUENCY (e);
	      if (e->dest->count < 0)
		e->dest->count = 0;
	      if (e->dest->frequency < 0)
		e->dest->frequency = 0;
	    }
	  count += e->count;
	  probability += e->probability;
	  remove_edge (e);
	}
      else
	ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance an sqrt builtin expander expands if with
	 sibcall in the then and label for `else`.  */
      if (LABEL_P (NEXT_INSN (last)))
	{
	  *can_fallthru = true;
	  break;
	}
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
		 | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
	BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  return bb;
}
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
	? (op1 / mod > 0 ? 1 : -1)
	: 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
		       gen_rtx_MINUS (mode,
				      gen_rtx_ABS (mode, op1),
				      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
		       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
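
/* Worked example (illustration only, not from the original source): dividing
   7 by OP1 = -2 truncates to -3 with remainder MOD = 1.  floor_sdiv_adjust
   yields -1 (MOD != 0 and OP1 / MOD < 0), giving the floor result -4;
   ceil_sdiv_adjust yields 0, since -3 already is the ceiling;
   round_sdiv_adjust yields -1 because |MOD| >= |OP1| - |MOD|, so -3.5 is
   rounded away from zero to -4.  For unsigned 7 / 2 (MOD = 1),
   ceil_udiv_adjust yields 1 and round_udiv_adjust yields 1 as well,
   because MOD >= OP1 - MOD.  */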
/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
   any rtl.  */

static rtx
convert_debug_memory_address (machine_mode mode, rtx x,
			      addr_space_t as)
{
  machine_mode xmode = GET_MODE (x);

#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (mode == Pmode
	      || mode == targetm.addr_space.address_mode (as));
  gcc_assert (xmode == mode || xmode == VOIDmode);
#else
  rtx temp;

  gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));

  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
    return x;

  if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
    x = lowpart_subreg (mode, x, xmode);
  else if (POINTERS_EXTEND_UNSIGNED > 0)
    x = gen_rtx_ZERO_EXTEND (mode, x);
  else if (!POINTERS_EXTEND_UNSIGNED)
    x = gen_rtx_SIGN_EXTEND (mode, x);
  else
    {
      switch (GET_CODE (x))
	{
	case SUBREG:
	  if ((SUBREG_PROMOTED_VAR_P (x)
	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
	       || (GET_CODE (SUBREG_REG (x)) == PLUS
		   && REG_P (XEXP (SUBREG_REG (x), 0))
		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
	      && GET_MODE (SUBREG_REG (x)) == mode)
	    return SUBREG_REG (x);
	  break;
	case LABEL_REF:
	  temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
	  return temp;
	case SYMBOL_REF:
	  temp = shallow_copy_rtx (x);
	  PUT_MODE (temp, mode);
	  return temp;
	case CONST:
	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
	  if (temp)
	    temp = gen_rtx_CONST (mode, temp);
	  return temp;
	case PLUS:
	case MINUS:
	  if (CONST_INT_P (XEXP (x, 1)))
	    {
	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
	      if (temp)
		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
	    }
	  break;
	default:
	  break;
	}
      /* Don't know how to express ptr_extend as operation in debug info.  */
      return NULL;
    }
#endif /* POINTERS_EXTEND_UNSIGNED */

  return x;
}
/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
   by avoid_deep_ter_for_debug.  */

static hash_map<tree, tree> *deep_ter_debug_map;

/* Split too deep TER chains for debug stmts using debug temporaries.  */

static void
avoid_deep_ter_for_debug (gimple *stmt, int depth)
{
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
	continue;
      gimple *g = get_gimple_for_ssa_name (use);
      if (g == NULL)
	continue;
      if (depth > 6 && !stmt_ends_bb_p (g))
	{
	  if (deep_ter_debug_map == NULL)
	    deep_ter_debug_map = new hash_map<tree, tree>;

	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
	  if (vexpr != NULL)
	    continue;
	  vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (use);
	  DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
	  avoid_deep_ter_for_debug (def_temp, 0);
	}
      else
	avoid_deep_ter_for_debug (g, depth + 1);
    }
}
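
/* Illustrative example (assumption, not in the original source): if a debug
   statement uses x_1 and x_1 sits at the top of a single-use chain such as

     x_1 = a_2 + b_3;   a_2 = c_4 * d_5;   ...

   that TER would substitute to a depth greater than 6, the code above
   inserts a debug bind   # DEBUG D#1 => x_1   right after the definition of
   x_1 and lets the debug statement refer to D#1 instead, keeping the
   expressions that debug info must later represent reasonably small.  */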
/* Return an RTX equivalent to the value of the parameter DECL.  */

static rtx
expand_debug_parm_decl (tree decl)
{
  rtx incoming = DECL_INCOMING_RTL (decl);

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
	  || (MEM_P (incoming)
	      && REG_P (XEXP (incoming, 0))
	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
    {
      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));

#ifdef HAVE_window_save
      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
	 If the target machine has an explicit window save instruction, the
	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
      if (REG_P (incoming)
	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
	incoming
	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
				OUTGOING_REGNO (REGNO (incoming)), 0);
      else if (MEM_P (incoming))
	{
	  rtx reg = XEXP (incoming, 0);
	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
	    {
	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
	      incoming = replace_equiv_address_nv (incoming, reg);
	    }
	  else
	    incoming = copy_rtx (incoming);
	}
#endif

      ENTRY_VALUE_EXP (rtl) = incoming;
      return rtl;
    }

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && !TREE_ADDRESSABLE (decl)
      && MEM_P (incoming)
      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    return copy_rtx (incoming);

  return NULL_RTX;
}

/* Return an RTX equivalent to the value of the tree expression EXP.  */

static rtx
expand_debug_expr (tree exp)
{
  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  machine_mode inner_mode = VOIDmode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
  addr_space_t as;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_expression:
      switch (TREE_CODE (exp))
	{
	case COND_EXPR:
	case DOT_PROD_EXPR:
	case SAD_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	case FMA_EXPR:
	  goto ternary;

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  goto binary;

	case TRUTH_NOT_EXPR:
	  goto unary;

	default:
	  break;
	}
      break;

    ternary:
      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
      if (!op2)
	return NULL_RTX;
      /* Fall through.  */

    binary:
    case tcc_binary:
      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
      if (!op1)
	return NULL_RTX;
      switch (TREE_CODE (exp))
	{
	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	case LROTATE_EXPR:
	case RROTATE_EXPR:
	case WIDEN_LSHIFT_EXPR:
	  /* Ensure second operand isn't wider than the first one.  */
	  inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  if (SCALAR_INT_MODE_P (inner_mode))
	    {
	      machine_mode opmode = mode;
	      if (VECTOR_MODE_P (mode))
		opmode = GET_MODE_INNER (mode);
	      if (SCALAR_INT_MODE_P (opmode)
		  && (GET_MODE_PRECISION (opmode)
		      < GET_MODE_PRECISION (inner_mode)))
		op1 = lowpart_subreg (opmode, op1, inner_mode);
	    }
	  break;
	default:
	  break;
	}
      /* Fall through.  */

    unary:
    case tcc_unary:
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
	return NULL_RTX;
      break;

    case tcc_comparison:
      unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
      goto binary;

    case tcc_type:
    case tcc_statement:
      gcc_unreachable ();

    case tcc_constant:
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_reference:
    case tcc_vl_exp:
      break;
    }

  switch (TREE_CODE (exp))
    {
    case STRING_CST:
      if (!lookup_constant_def (exp))
	{
	  if (strlen (TREE_STRING_POINTER (exp)) + 1
	      != (size_t) TREE_STRING_LENGTH (exp))
	    return NULL_RTX;
	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
	  op0 = gen_rtx_MEM (BLKmode, op0);
	  set_mem_attributes (op0, exp, 0);
	  return op0;
	}
      /* Fall through...  */

    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
      return op0;

    case COMPLEX_CST:
      gcc_assert (COMPLEX_MODE_P (mode));
      op0 = expand_debug_expr (TREE_REALPART (exp));
      op1 = expand_debug_expr (TREE_IMAGPART (exp));
      return gen_rtx_CONCAT (mode, op0, op1);

    case DEBUG_EXPR_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      if (op0)
	return op0;

      op0 = gen_rtx_DEBUG_EXPR (mode);
      DEBUG_EXPR_TREE_DECL (op0) = exp;
      SET_DECL_RTL (exp, op0);

      return op0;

    case VAR_DECL:
    case PARM_DECL:
    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case RESULT_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      /* This decl was probably optimized away.  */
      if (!op0)
	{
	  if (TREE_CODE (exp) != VAR_DECL
	      || DECL_EXTERNAL (exp)
	      || !TREE_STATIC (exp)
	      || !DECL_NAME (exp)
	      || DECL_HARD_REGISTER (exp)
	      || DECL_IN_CONSTANT_POOL (exp)
	      || mode == VOIDmode)
	    return NULL;

	  op0 = make_decl_rtl_for_debug (exp);
	  if (!MEM_P (op0)
	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
	    return NULL;
	}
      else
	op0 = copy_rtx (op0);

      if (GET_MODE (op0) == BLKmode
	  /* If op0 is not BLKmode, but mode is, adjust_mode
	     below would ICE.  While it is likely a FE bug,
	     try to be robust here.  See PR43166.  */
	  || mode == BLKmode
	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
	{
	  gcc_assert (MEM_P (op0));
	  op0 = adjust_address_nv (op0, mode, 0);
	  return op0;
	}

      /* Fall through.  */

    adjust_mode:
    case PAREN_EXPR:
    CASE_CONVERT:
      {
	inner_mode = GET_MODE (op0);

	if (mode == inner_mode)
	  return op0;

	if (inner_mode == VOIDmode)
	  {
	    if (TREE_CODE (exp) == SSA_NAME)
	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
	    else
	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	    if (mode == inner_mode)
	      return op0;
	  }

	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
	  {
	    if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
	    else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
	  }
	else if (FLOAT_MODE_P (mode))
	  {
	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
	  }
	else if (FLOAT_MODE_P (inner_mode))
	  {
	    if (unsignedp)
	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
	  }
	else if (CONSTANT_P (op0)
		 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
	  op0 = lowpart_subreg (mode, op0, inner_mode);
	else if (UNARY_CLASS_P (exp)
		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
		 : unsignedp)
	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
	else
	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

	return op0;
      }

    case MEM_REF:
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
	{
	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
				     TREE_OPERAND (exp, 0),
				     TREE_OPERAND (exp, 1));
	  if (newexp)
	    return expand_debug_expr (newexp);
	}
      /* FALLTHROUGH */
    case INDIRECT_REF:
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
	return NULL;

      if (TREE_CODE (exp) == MEM_REF)
	{
	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
	      || (GET_CODE (op0) == PLUS
		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
	       Instead just use get_inner_reference.  */
	    goto component_ref;

	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
	  if (!op1 || !CONST_INT_P (op1))
	    return NULL;

	  op0 = plus_constant (inner_mode, op0, INTVAL (op1));
	}

      as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));

      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
					  op0, as);
      if (op0 == NULL_RTX)
	return NULL;

      op0 = gen_rtx_MEM (mode, op0);
      set_mem_attributes (op0, exp, 0);
      if (TREE_CODE (exp) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
	set_mem_expr (op0, NULL_TREE);
      set_mem_addr_space (op0, as);

      return op0;

    case TARGET_MEM_REF:
:
4410 if (TREE_CODE (TMR_BASE (exp
)) == ADDR_EXPR
4411 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp
), 0)))
4414 op0
= expand_debug_expr
4415 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp
)), exp
));
4419 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4420 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
4422 if (op0
== NULL_RTX
)
4425 op0
= gen_rtx_MEM (mode
, op0
);
4427 set_mem_attributes (op0
, exp
, 0);
4428 set_mem_addr_space (op0
, as
);
4434 case ARRAY_RANGE_REF
:
4439 case VIEW_CONVERT_EXPR
:
4442 HOST_WIDE_INT bitsize
, bitpos
;
4444 int reversep
, volatilep
= 0;
4446 = get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode1
,
4447 &unsignedp
, &reversep
, &volatilep
, false);
4453 orig_op0
= op0
= expand_debug_expr (tem
);
4460 machine_mode addrmode
, offmode
;
4465 op0
= XEXP (op0
, 0);
4466 addrmode
= GET_MODE (op0
);
4467 if (addrmode
== VOIDmode
)
4470 op1
= expand_debug_expr (offset
);
4474 offmode
= GET_MODE (op1
);
4475 if (offmode
== VOIDmode
)
4476 offmode
= TYPE_MODE (TREE_TYPE (offset
));
4478 if (addrmode
!= offmode
)
4479 op1
= lowpart_subreg (addrmode
, op1
, offmode
);
4481 /* Don't use offset_address here, we don't need a
4482 recognizable address, and we don't want to generate
4484 op0
= gen_rtx_MEM (mode
, simplify_gen_binary (PLUS
, addrmode
,
4490 if (mode1
== VOIDmode
)
4492 mode1
= smallest_mode_for_size (bitsize
, MODE_INT
);
4493 if (bitpos
>= BITS_PER_UNIT
)
4495 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
4496 bitpos
%= BITS_PER_UNIT
;
4498 else if (bitpos
< 0)
4501 = (-bitpos
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
;
4502 op0
= adjust_address_nv (op0
, mode1
, -units
);
4503 bitpos
+= units
* BITS_PER_UNIT
;
4505 else if (bitpos
== 0 && bitsize
== GET_MODE_BITSIZE (mode
))
4506 op0
= adjust_address_nv (op0
, mode
, 0);
4507 else if (GET_MODE (op0
) != mode1
)
4508 op0
= adjust_address_nv (op0
, mode1
, 0);
4510 op0
= copy_rtx (op0
);
4511 if (op0
== orig_op0
)
4512 op0
= shallow_copy_rtx (op0
);
4513 set_mem_attributes (op0
, exp
, 0);
4516 if (bitpos
== 0 && mode
== GET_MODE (op0
))
4522 if (GET_MODE (op0
) == BLKmode
)
4525 if ((bitpos
% BITS_PER_UNIT
) == 0
4526 && bitsize
== GET_MODE_BITSIZE (mode1
))
4528 machine_mode opmode
= GET_MODE (op0
);
4530 if (opmode
== VOIDmode
)
4531 opmode
= TYPE_MODE (TREE_TYPE (tem
));
4533 /* This condition may hold if we're expanding the address
4534 right past the end of an array that turned out not to
4535 be addressable (i.e., the address was only computed in
4536 debug stmts). The gen_subreg below would rightfully
4537 crash, and the address doesn't really exist, so just
4539 if (bitpos
>= GET_MODE_BITSIZE (opmode
))
4542 if ((bitpos
% GET_MODE_BITSIZE (mode
)) == 0)
4543 return simplify_gen_subreg (mode
, op0
, opmode
,
4544 bitpos
/ BITS_PER_UNIT
);
4547 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0
))
4548 && TYPE_UNSIGNED (TREE_TYPE (exp
))
4550 : ZERO_EXTRACT
, mode
,
4551 GET_MODE (op0
) != VOIDmode
4553 : TYPE_MODE (TREE_TYPE (tem
)),
4554 op0
, GEN_INT (bitsize
), GEN_INT (bitpos
));
4558 return simplify_gen_unary (ABS
, mode
, op0
, mode
);
4561 return simplify_gen_unary (NEG
, mode
, op0
, mode
);
4564 return simplify_gen_unary (NOT
, mode
, op0
, mode
);
4567 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
4569 ? UNSIGNED_FLOAT
: FLOAT
, mode
, op0
,
4572 case FIX_TRUNC_EXPR
:
4573 return simplify_gen_unary (unsignedp
? UNSIGNED_FIX
: FIX
, mode
, op0
,
	case POINTER_PLUS_EXPR:
	  /* For the rare target where pointers are not the same size as
	     size_t, we need to check for mis-matched modes and correct
	     the addition.  */
	  if (op0 && op1
	      && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
	      && GET_MODE (op0) != GET_MODE (op1))
	    {
	      if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
		  /* If OP0 is a partial mode, then we must truncate, even
		     if it has the same bitsize as OP1 as GCC's
		     representation of partial modes is opaque.  */
		  || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
		      && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
		op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
					  GET_MODE (op1));
	      else
		/* We always sign-extend, regardless of the signedness of
		   the operand, because the operand is always unsigned
		   here even if the original C expression is signed.  */
		op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
					  GET_MODE (op1));
	    }
	  /* Fall through.  */
	case PLUS_EXPR:
	  return simplify_gen_binary (PLUS, mode, op0, op1);
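
	/* As a concrete illustration of the mode fix-up above: on a
	   hypothetical target with 64-bit pointers but 32-bit sizetype, a
	   POINTER_PLUS_EXPR has op0 in DImode and op1 in SImode, so op1 is
	   widened (always by sign-extension, per the comment above) to
	   DImode before the PLUS is generated.  */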
4604 return simplify_gen_binary (MINUS
, mode
, op0
, op1
);
4607 return simplify_gen_binary (MULT
, mode
, op0
, op1
);
4610 case TRUNC_DIV_EXPR
:
4611 case EXACT_DIV_EXPR
:
4613 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4615 return simplify_gen_binary (DIV
, mode
, op0
, op1
);
4617 case TRUNC_MOD_EXPR
:
4618 return simplify_gen_binary (unsignedp
? UMOD
: MOD
, mode
, op0
, op1
);
	case FLOOR_DIV_EXPR:
	  if (unsignedp)
	    return simplify_gen_binary (UDIV, mode, op0, op1);
	  else
	    {
	      rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	      rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	      rtx adj = floor_sdiv_adjust (mode, mod, op1);
	      return simplify_gen_binary (PLUS, mode, div, adj);
	    }
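
	/* Roughly, the signed FLOOR_DIV_EXPR case above computes
	     floor_div (a, b) = trunc_div (a, b) + adj
	   where adj (from floor_sdiv_adjust) is -1 when the truncating
	   division rounded towards zero but the exact quotient is not an
	   integer and is negative, i.e. when the remainder is nonzero and
	   has a different sign than b.  E.g. -7 / 2: trunc_div = -3,
	   mod = -1, adj = -1, giving the floor value -4.  */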
4631 case FLOOR_MOD_EXPR
:
4633 return simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4636 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4637 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
4638 adj
= simplify_gen_unary (NEG
, mode
,
4639 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4641 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4647 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4648 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4649 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
4650 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4654 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4655 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4656 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
4657 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4663 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4664 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
4665 adj
= simplify_gen_unary (NEG
, mode
,
4666 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4668 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4672 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4673 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
4674 adj
= simplify_gen_unary (NEG
, mode
,
4675 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4677 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4680 case ROUND_DIV_EXPR
:
4683 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4684 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4685 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
4686 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4690 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4691 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4692 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
4693 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4696 case ROUND_MOD_EXPR
:
4699 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4700 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
4701 adj
= simplify_gen_unary (NEG
, mode
,
4702 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4704 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4708 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4709 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
4710 adj
= simplify_gen_unary (NEG
, mode
,
4711 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4713 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4717 return simplify_gen_binary (ASHIFT
, mode
, op0
, op1
);
4721 return simplify_gen_binary (LSHIFTRT
, mode
, op0
, op1
);
4723 return simplify_gen_binary (ASHIFTRT
, mode
, op0
, op1
);
4726 return simplify_gen_binary (ROTATE
, mode
, op0
, op1
);
4729 return simplify_gen_binary (ROTATERT
, mode
, op0
, op1
);
4732 return simplify_gen_binary (unsignedp
? UMIN
: SMIN
, mode
, op0
, op1
);
4735 return simplify_gen_binary (unsignedp
? UMAX
: SMAX
, mode
, op0
, op1
);
4738 case TRUTH_AND_EXPR
:
4739 return simplify_gen_binary (AND
, mode
, op0
, op1
);
4743 return simplify_gen_binary (IOR
, mode
, op0
, op1
);
4746 case TRUTH_XOR_EXPR
:
4747 return simplify_gen_binary (XOR
, mode
, op0
, op1
);
4749 case TRUTH_ANDIF_EXPR
:
4750 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, const0_rtx
);
4752 case TRUTH_ORIF_EXPR
:
4753 return gen_rtx_IF_THEN_ELSE (mode
, op0
, const_true_rtx
, op1
);
4755 case TRUTH_NOT_EXPR
:
4756 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, const0_rtx
);
4759 return simplify_gen_relational (unsignedp
? LTU
: LT
, mode
, inner_mode
,
4763 return simplify_gen_relational (unsignedp
? LEU
: LE
, mode
, inner_mode
,
4767 return simplify_gen_relational (unsignedp
? GTU
: GT
, mode
, inner_mode
,
4771 return simplify_gen_relational (unsignedp
? GEU
: GE
, mode
, inner_mode
,
4775 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, op1
);
4778 return simplify_gen_relational (NE
, mode
, inner_mode
, op0
, op1
);
4780 case UNORDERED_EXPR
:
4781 return simplify_gen_relational (UNORDERED
, mode
, inner_mode
, op0
, op1
);
4784 return simplify_gen_relational (ORDERED
, mode
, inner_mode
, op0
, op1
);
4787 return simplify_gen_relational (UNLT
, mode
, inner_mode
, op0
, op1
);
4790 return simplify_gen_relational (UNLE
, mode
, inner_mode
, op0
, op1
);
4793 return simplify_gen_relational (UNGT
, mode
, inner_mode
, op0
, op1
);
4796 return simplify_gen_relational (UNGE
, mode
, inner_mode
, op0
, op1
);
4799 return simplify_gen_relational (UNEQ
, mode
, inner_mode
, op0
, op1
);
4802 return simplify_gen_relational (LTGT
, mode
, inner_mode
, op0
, op1
);
4805 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, op2
);
4808 gcc_assert (COMPLEX_MODE_P (mode
));
4809 if (GET_MODE (op0
) == VOIDmode
)
4810 op0
= gen_rtx_CONST (GET_MODE_INNER (mode
), op0
);
4811 if (GET_MODE (op1
) == VOIDmode
)
4812 op1
= gen_rtx_CONST (GET_MODE_INNER (mode
), op1
);
4813 return gen_rtx_CONCAT (mode
, op0
, op1
);
4816 if (GET_CODE (op0
) == CONCAT
)
4817 return gen_rtx_CONCAT (mode
, XEXP (op0
, 0),
4818 simplify_gen_unary (NEG
, GET_MODE_INNER (mode
),
4820 GET_MODE_INNER (mode
)));
4823 machine_mode imode
= GET_MODE_INNER (mode
);
4828 re
= adjust_address_nv (op0
, imode
, 0);
4829 im
= adjust_address_nv (op0
, imode
, GET_MODE_SIZE (imode
));
4833 machine_mode ifmode
= int_mode_for_mode (mode
);
4834 machine_mode ihmode
= int_mode_for_mode (imode
);
4836 if (ifmode
== BLKmode
|| ihmode
== BLKmode
)
4838 halfsize
= GEN_INT (GET_MODE_BITSIZE (ihmode
));
4841 re
= gen_rtx_SUBREG (ifmode
, re
, 0);
4842 re
= gen_rtx_ZERO_EXTRACT (ihmode
, re
, halfsize
, const0_rtx
);
4843 if (imode
!= ihmode
)
4844 re
= gen_rtx_SUBREG (imode
, re
, 0);
4845 im
= copy_rtx (op0
);
4847 im
= gen_rtx_SUBREG (ifmode
, im
, 0);
4848 im
= gen_rtx_ZERO_EXTRACT (ihmode
, im
, halfsize
, halfsize
);
4849 if (imode
!= ihmode
)
4850 im
= gen_rtx_SUBREG (imode
, im
, 0);
4852 im
= gen_rtx_NEG (imode
, im
);
4853 return gen_rtx_CONCAT (mode
, re
, im
);
4857 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4858 if (!op0
|| !MEM_P (op0
))
4860 if ((TREE_CODE (TREE_OPERAND (exp
, 0)) == VAR_DECL
4861 || TREE_CODE (TREE_OPERAND (exp
, 0)) == PARM_DECL
4862 || TREE_CODE (TREE_OPERAND (exp
, 0)) == RESULT_DECL
)
4863 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp
, 0))
4864 || target_for_debug_bind (TREE_OPERAND (exp
, 0))))
4865 return gen_rtx_DEBUG_IMPLICIT_PTR (mode
, TREE_OPERAND (exp
, 0));
4867 if (handled_component_p (TREE_OPERAND (exp
, 0)))
4869 HOST_WIDE_INT bitoffset
, bitsize
, maxsize
;
4872 = get_ref_base_and_extent (TREE_OPERAND (exp
, 0), &bitoffset
,
4873 &bitsize
, &maxsize
, &reverse
);
4874 if ((TREE_CODE (decl
) == VAR_DECL
4875 || TREE_CODE (decl
) == PARM_DECL
4876 || TREE_CODE (decl
) == RESULT_DECL
)
4877 && (!TREE_ADDRESSABLE (decl
)
4878 || target_for_debug_bind (decl
))
4879 && (bitoffset
% BITS_PER_UNIT
) == 0
4881 && bitsize
== maxsize
)
4883 rtx base
= gen_rtx_DEBUG_IMPLICIT_PTR (mode
, decl
);
4884 return plus_constant (mode
, base
, bitoffset
/ BITS_PER_UNIT
);
4888 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == MEM_REF
4889 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
4892 op0
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
4895 && (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
4896 || (GET_CODE (op0
) == PLUS
4897 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
4898 && CONST_INT_P (XEXP (op0
, 1)))))
4900 op1
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
4902 if (!op1
|| !CONST_INT_P (op1
))
4905 return plus_constant (mode
, op0
, INTVAL (op1
));
4912 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
4913 op0
= convert_debug_memory_address (mode
, XEXP (op0
, 0), as
);
4921 op0
= gen_rtx_CONCATN
4922 (mode
, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
))));
4924 for (i
= 0; i
< VECTOR_CST_NELTS (exp
); ++i
)
4926 op1
= expand_debug_expr (VECTOR_CST_ELT (exp
, i
));
4929 XVECEXP (op0
, 0, i
) = op1
;
4936 if (TREE_CLOBBER_P (exp
))
4938 else if (TREE_CODE (TREE_TYPE (exp
)) == VECTOR_TYPE
)
4943 op0
= gen_rtx_CONCATN
4944 (mode
, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
))));
4946 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), i
, val
)
4948 op1
= expand_debug_expr (val
);
4951 XVECEXP (op0
, 0, i
) = op1
;
4954 if (i
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)))
4956 op1
= expand_debug_expr
4957 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp
))));
4962 for (; i
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)); i
++)
4963 XVECEXP (op0
, 0, i
) = op1
;
4969 goto flag_unsupported
;
4972 /* ??? Maybe handle some builtins? */
4977 gimple
*g
= get_gimple_for_ssa_name (exp
);
4981 if (deep_ter_debug_map
)
4983 tree
*slot
= deep_ter_debug_map
->get (exp
);
4988 t
= gimple_assign_rhs_to_tree (g
);
4989 op0
= expand_debug_expr (t
);
4995 /* If this is a reference to an incoming value of
4996 parameter that is never used in the code or where the
4997 incoming value is never used in the code, use
4998 PARM_DECL's DECL_RTL if set. */
4999 if (SSA_NAME_IS_DEFAULT_DEF (exp
)
5000 && SSA_NAME_VAR (exp
)
5001 && TREE_CODE (SSA_NAME_VAR (exp
)) == PARM_DECL
5002 && has_zero_uses (exp
))
5004 op0
= expand_debug_parm_decl (SSA_NAME_VAR (exp
));
5007 op0
= expand_debug_expr (SSA_NAME_VAR (exp
));
5012 int part
= var_to_partition (SA
.map
, exp
);
5014 if (part
== NO_PARTITION
)
5017 gcc_assert (part
>= 0 && (unsigned)part
< SA
.map
->num_partitions
);
5019 op0
= copy_rtx (SA
.partition_to_pseudo
[part
]);
5027 /* Vector stuff. For most of the codes we don't have rtl codes. */
5028 case REALIGN_LOAD_EXPR
:
5029 case REDUC_MAX_EXPR
:
5030 case REDUC_MIN_EXPR
:
5031 case REDUC_PLUS_EXPR
:
5033 case VEC_PACK_FIX_TRUNC_EXPR
:
5034 case VEC_PACK_SAT_EXPR
:
5035 case VEC_PACK_TRUNC_EXPR
:
5036 case VEC_UNPACK_FLOAT_HI_EXPR
:
5037 case VEC_UNPACK_FLOAT_LO_EXPR
:
5038 case VEC_UNPACK_HI_EXPR
:
5039 case VEC_UNPACK_LO_EXPR
:
5040 case VEC_WIDEN_MULT_HI_EXPR
:
5041 case VEC_WIDEN_MULT_LO_EXPR
:
5042 case VEC_WIDEN_MULT_EVEN_EXPR
:
5043 case VEC_WIDEN_MULT_ODD_EXPR
:
5044 case VEC_WIDEN_LSHIFT_HI_EXPR
:
5045 case VEC_WIDEN_LSHIFT_LO_EXPR
:
5050 case ADDR_SPACE_CONVERT_EXPR
:
5051 case FIXED_CONVERT_EXPR
:
5053 case WITH_SIZE_EXPR
:
5054 case BIT_INSERT_EXPR
:
5058 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5059 && SCALAR_INT_MODE_P (mode
))
5062 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5064 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
5067 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5069 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op1
,
5071 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
5072 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
5076 case WIDEN_MULT_EXPR
:
5077 case WIDEN_MULT_PLUS_EXPR
:
5078 case WIDEN_MULT_MINUS_EXPR
:
5079 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5080 && SCALAR_INT_MODE_P (mode
))
5082 inner_mode
= GET_MODE (op0
);
5083 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5084 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
5086 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
5087 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1))))
5088 op1
= simplify_gen_unary (ZERO_EXTEND
, mode
, op1
, inner_mode
);
5090 op1
= simplify_gen_unary (SIGN_EXTEND
, mode
, op1
, inner_mode
);
5091 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
5092 if (TREE_CODE (exp
) == WIDEN_MULT_EXPR
)
5094 else if (TREE_CODE (exp
) == WIDEN_MULT_PLUS_EXPR
)
5095 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
5097 return simplify_gen_binary (MINUS
, mode
, op2
, op0
);
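
	/* In other words, when both modes are scalar integers, a widening
	   multiply(-accumulate) is modeled for debug purposes by extending
	   each narrow operand to the wide mode (zero- or sign-extending
	   according to its type), doing an ordinary MULT in the wide mode,
	   and then adding or subtracting the accumulator operand op2 where
	   applicable.  */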
5101 case MULT_HIGHPART_EXPR
:
5102 /* ??? Similar to the above. */
5105 case WIDEN_SUM_EXPR
:
5106 case WIDEN_LSHIFT_EXPR
:
5107 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5108 && SCALAR_INT_MODE_P (mode
))
5111 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5113 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
5115 return simplify_gen_binary (TREE_CODE (exp
) == WIDEN_LSHIFT_EXPR
5116 ? ASHIFT
: PLUS
, mode
, op0
, op1
);
5121 return simplify_gen_ternary (FMA
, mode
, inner_mode
, op0
, op1
, op2
);
5134 /* Return an RTX equivalent to the source bind value of the tree expression
5138 expand_debug_source_expr (tree exp
)
5141 machine_mode mode
= VOIDmode
, inner_mode
;
5143 switch (TREE_CODE (exp
))
5147 mode
= DECL_MODE (exp
);
5148 op0
= expand_debug_parm_decl (exp
);
5151 /* See if this isn't an argument that has been completely
5153 if (!DECL_RTL_SET_P (exp
)
5154 && !DECL_INCOMING_RTL (exp
)
5155 && DECL_ABSTRACT_ORIGIN (current_function_decl
))
5157 tree aexp
= DECL_ORIGIN (exp
);
5158 if (DECL_CONTEXT (aexp
)
5159 == DECL_ABSTRACT_ORIGIN (current_function_decl
))
5161 vec
<tree
, va_gc
> **debug_args
;
5164 debug_args
= decl_debug_args_lookup (current_function_decl
);
5165 if (debug_args
!= NULL
)
5167 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
);
5170 return gen_rtx_DEBUG_PARAMETER_REF (mode
, aexp
);
5180 if (op0
== NULL_RTX
)
5183 inner_mode
= GET_MODE (op0
);
5184 if (mode
== inner_mode
)
5187 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
5189 if (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (inner_mode
))
5190 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
5191 else if (GET_MODE_BITSIZE (mode
) < GET_MODE_BITSIZE (inner_mode
))
5192 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
5194 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
5196 else if (FLOAT_MODE_P (mode
))
5198 else if (FLOAT_MODE_P (inner_mode
))
5200 if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
5201 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
5203 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
5205 else if (CONSTANT_P (op0
)
5206 || GET_MODE_BITSIZE (mode
) <= GET_MODE_BITSIZE (inner_mode
))
5207 op0
= lowpart_subreg (mode
, op0
, inner_mode
);
5208 else if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
5209 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
5211 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
5216 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
5217 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5218 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
5221 avoid_complex_debug_insns (rtx_insn
*insn
, rtx
*exp_p
, int depth
)
5225 if (exp
== NULL_RTX
)
5228 if ((OBJECT_P (exp
) && !MEM_P (exp
)) || GET_CODE (exp
) == CLOBBER
)
5233 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5234 rtx dval
= make_debug_expr_from_rtl (exp
);
5236 /* Emit a debug bind insn before INSN. */
5237 rtx bind
= gen_rtx_VAR_LOCATION (GET_MODE (exp
),
5238 DEBUG_EXPR_TREE_DECL (dval
), exp
,
5239 VAR_INIT_STATUS_INITIALIZED
);
5241 emit_debug_insn_before (bind
, insn
);
5246 const char *format_ptr
= GET_RTX_FORMAT (GET_CODE (exp
));
5248 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (exp
)); i
++)
5249 switch (*format_ptr
++)
5252 avoid_complex_debug_insns (insn
, &XEXP (exp
, i
), depth
+ 1);
5257 for (j
= 0; j
< XVECLEN (exp
, i
); j
++)
5258 avoid_complex_debug_insns (insn
, &XVECEXP (exp
, i
, j
), depth
+ 1);
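
/* For example, when a debug location expression such as
     (plus (mult (plus (reg A) (reg B)) (reg C)) (const_int 8))
   nests more than four levels deep, the offending subexpression is split
   out into its own DEBUG_EXPR, a debug bind insn for it is emitted before
   INSN, and the original location then refers to that DEBUG_EXPR instead,
   keeping each individual location expression shallow.  The rtx above is
   only an illustration.  */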
5266 /* Expand the _LOCs in debug insns. We run this after expanding all
5267 regular insns, so that any variables referenced in the function
5268 will have their DECL_RTLs set. */
5271 expand_debug_locations (void)
5274 rtx_insn
*last
= get_last_insn ();
5275 int save_strict_alias
= flag_strict_aliasing
;
5277 /* New alias sets while setting up memory attributes cause
5278 -fcompare-debug failures, even though it doesn't bring about any
5280 flag_strict_aliasing
= 0;
5282 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5283 if (DEBUG_INSN_P (insn
))
5285 tree value
= (tree
)INSN_VAR_LOCATION_LOC (insn
);
5287 rtx_insn
*prev_insn
, *insn2
;
5290 if (value
== NULL_TREE
)
5294 if (INSN_VAR_LOCATION_STATUS (insn
)
5295 == VAR_INIT_STATUS_UNINITIALIZED
)
5296 val
= expand_debug_source_expr (value
);
5297 /* The avoid_deep_ter_for_debug function inserts
5298 debug bind stmts after SSA_NAME definition, with the
5299 SSA_NAME as the whole bind location. Disable temporarily
5300 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5301 being defined in this DEBUG_INSN. */
5302 else if (deep_ter_debug_map
&& TREE_CODE (value
) == SSA_NAME
)
5304 tree
*slot
= deep_ter_debug_map
->get (value
);
5307 if (*slot
== INSN_VAR_LOCATION_DECL (insn
))
5312 val
= expand_debug_expr (value
);
5314 *slot
= INSN_VAR_LOCATION_DECL (insn
);
5317 val
= expand_debug_expr (value
);
5318 gcc_assert (last
== get_last_insn ());
5322 val
= gen_rtx_UNKNOWN_VAR_LOC ();
5325 mode
= GET_MODE (INSN_VAR_LOCATION (insn
));
5327 gcc_assert (mode
== GET_MODE (val
)
5328 || (GET_MODE (val
) == VOIDmode
5329 && (CONST_SCALAR_INT_P (val
)
5330 || GET_CODE (val
) == CONST_FIXED
5331 || GET_CODE (val
) == LABEL_REF
)));
5334 INSN_VAR_LOCATION_LOC (insn
) = val
;
5335 prev_insn
= PREV_INSN (insn
);
5336 for (insn2
= insn
; insn2
!= prev_insn
; insn2
= PREV_INSN (insn2
))
5337 avoid_complex_debug_insns (insn2
, &INSN_VAR_LOCATION_LOC (insn2
), 0);
5340 flag_strict_aliasing
= save_strict_alias
;
/* Swap the operands of commutative operations so that the more expensive
   one is expanded first.  */
5347 reorder_operands (basic_block bb
)
5349 unsigned int *lattice
; /* Hold cost of each statement. */
5350 unsigned int i
= 0, n
= 0;
5351 gimple_stmt_iterator gsi
;
5357 use_operand_p use_p
;
5358 gimple
*def0
, *def1
;
5360 /* Compute cost of each statement using estimate_num_insns. */
5361 stmts
= bb_seq (bb
);
5362 for (gsi
= gsi_start (stmts
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5364 stmt
= gsi_stmt (gsi
);
5365 if (!is_gimple_debug (stmt
))
5366 gimple_set_uid (stmt
, n
++);
5368 lattice
= XNEWVEC (unsigned int, n
);
5369 for (gsi
= gsi_start (stmts
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5372 stmt
= gsi_stmt (gsi
);
5373 if (is_gimple_debug (stmt
))
5375 cost
= estimate_num_insns (stmt
, &eni_size_weights
);
5377 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
5379 tree use
= USE_FROM_PTR (use_p
);
5381 if (TREE_CODE (use
) != SSA_NAME
)
5383 def_stmt
= get_gimple_for_ssa_name (use
);
5386 lattice
[i
] += lattice
[gimple_uid (def_stmt
)];
5389 if (!is_gimple_assign (stmt
)
5390 || !commutative_tree_code (gimple_assign_rhs_code (stmt
)))
5392 op0
= gimple_op (stmt
, 1);
5393 op1
= gimple_op (stmt
, 2);
5394 if (TREE_CODE (op0
) != SSA_NAME
5395 || TREE_CODE (op1
) != SSA_NAME
)
5397 /* Swap operands if the second one is more expensive. */
5398 def0
= get_gimple_for_ssa_name (op0
);
5399 def1
= get_gimple_for_ssa_name (op1
);
5403 if (!def0
|| lattice
[gimple_uid (def1
)] > lattice
[gimple_uid (def0
)])
5407 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5409 fprintf (dump_file
, "Swap operands in stmt:\n");
5410 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
5411 fprintf (dump_file
, "Cost left opnd=%d, right opnd=%d\n",
5412 def0
? lattice
[gimple_uid (def0
)] : 0,
5413 lattice
[gimple_uid (def1
)]);
5415 swap_ssa_operands (stmt
, gimple_assign_rhs1_ptr (stmt
),
5416 gimple_assign_rhs2_ptr (stmt
));
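
/* As an illustration: for
     x_3 = tmp_1 + tmp_2;
   where tmp_1 is defined by a cheap statement and tmp_2 by a long chain of
   feeding statements, the lattice costs computed above make
   lattice[uid(def(tmp_2))] the larger one, so the operands are swapped and
   the expensive tmp_2 subtree is expanded first, which tends to produce
   better RTL.  The SSA names here are only illustrative.  */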
5422 /* Expand basic block BB from GIMPLE trees to RTL. */
5425 expand_gimple_basic_block (basic_block bb
, bool disable_tail_calls
)
5427 gimple_stmt_iterator gsi
;
5429 gimple
*stmt
= NULL
;
5436 fprintf (dump_file
, "\n;; Generating RTL for gimple basic block %d\n",
5439 /* Note that since we are now transitioning from GIMPLE to RTL, we
5440 cannot use the gsi_*_bb() routines because they expect the basic
5441 block to be in GIMPLE, instead of RTL. Therefore, we need to
5442 access the BB sequence directly. */
5444 reorder_operands (bb
);
5445 stmts
= bb_seq (bb
);
5446 bb
->il
.gimple
.seq
= NULL
;
5447 bb
->il
.gimple
.phi_nodes
= NULL
;
5448 rtl_profile_for_bb (bb
);
5449 init_rtl_bb_info (bb
);
5450 bb
->flags
|= BB_RTL
;
5452 /* Remove the RETURN_EXPR if we may fall though to the exit
5454 gsi
= gsi_last (stmts
);
5455 if (!gsi_end_p (gsi
)
5456 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
5458 greturn
*ret_stmt
= as_a
<greturn
*> (gsi_stmt (gsi
));
5460 gcc_assert (single_succ_p (bb
));
5461 gcc_assert (single_succ (bb
) == EXIT_BLOCK_PTR_FOR_FN (cfun
));
5463 if (bb
->next_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
5464 && !gimple_return_retval (ret_stmt
))
5466 gsi_remove (&gsi
, false);
5467 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
5471 gsi
= gsi_start (stmts
);
5472 if (!gsi_end_p (gsi
))
5474 stmt
= gsi_stmt (gsi
);
5475 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5479 rtx_code_label
**elt
= lab_rtx_for_bb
->get (bb
);
5483 last
= get_last_insn ();
5487 expand_gimple_stmt (stmt
);
5494 /* Java emits line number notes in the top of labels.
5495 ??? Make this go away once line number notes are obsoleted. */
5496 BB_HEAD (bb
) = NEXT_INSN (last
);
5497 if (NOTE_P (BB_HEAD (bb
)))
5498 BB_HEAD (bb
) = NEXT_INSN (BB_HEAD (bb
));
5499 note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, BB_HEAD (bb
));
5501 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5504 BB_HEAD (bb
) = note
= emit_note (NOTE_INSN_BASIC_BLOCK
);
5506 NOTE_BASIC_BLOCK (note
) = bb
;
5508 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5512 stmt
= gsi_stmt (gsi
);
5514 /* If this statement is a non-debug one, and we generate debug
5515 insns, then this one might be the last real use of a TERed
5516 SSA_NAME, but where there are still some debug uses further
5517 down. Expanding the current SSA name in such further debug
5518 uses by their RHS might lead to wrong debug info, as coalescing
5519 might make the operands of such RHS be placed into the same
5520 pseudo as something else. Like so:
5521 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5525 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
	 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5527 the write to a_2 would actually have clobbered the place which
5530 So, instead of that, we recognize the situation, and generate
5531 debug temporaries at the last real use of TERed SSA names:
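	 roughly like this:
	   a_1 = a_0 + 1;
	   #DEBUG #D1 => a_1
	   x_3 = y_1 + z_2;
	   a_2 = ...
	   #DEBUG ... => #D1
	 so later writes to the pseudo shared with a_0 can no longer corrupt
	 the recorded debug value.  */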
5538 if (MAY_HAVE_DEBUG_INSNS
5540 && !is_gimple_debug (stmt
))
5546 location_t sloc
= curr_insn_location ();
5548 /* Look for SSA names that have their last use here (TERed
5549 names always have only one real use). */
5550 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
5551 if ((def
= get_gimple_for_ssa_name (op
)))
5553 imm_use_iterator imm_iter
;
5554 use_operand_p use_p
;
5555 bool have_debug_uses
= false;
5557 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, op
)
5559 if (gimple_debug_bind_p (USE_STMT (use_p
)))
5561 have_debug_uses
= true;
5566 if (have_debug_uses
)
5568 /* OP is a TERed SSA name, with DEF its defining
5569 statement, and where OP is used in further debug
5570 instructions. Generate a debug temporary, and
5571 replace all uses of OP in debug insns with that
5574 tree value
= gimple_assign_rhs_to_tree (def
);
5575 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
5579 set_curr_insn_location (gimple_location (def
));
5581 DECL_ARTIFICIAL (vexpr
) = 1;
5582 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
5584 mode
= DECL_MODE (value
);
5586 mode
= TYPE_MODE (TREE_TYPE (value
));
5587 DECL_MODE (vexpr
) = mode
;
5589 val
= gen_rtx_VAR_LOCATION
5590 (mode
, vexpr
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
5592 emit_debug_insn (val
);
5594 FOR_EACH_IMM_USE_STMT (debugstmt
, imm_iter
, op
)
5596 if (!gimple_debug_bind_p (debugstmt
))
5599 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
5600 SET_USE (use_p
, vexpr
);
5602 update_stmt (debugstmt
);
5606 set_curr_insn_location (sloc
);
5609 currently_expanding_gimple_stmt
= stmt
;
5611 /* Expand this statement, then evaluate the resulting RTL and
5612 fixup the CFG accordingly. */
5613 if (gimple_code (stmt
) == GIMPLE_COND
)
5615 new_bb
= expand_gimple_cond (bb
, as_a
<gcond
*> (stmt
));
5619 else if (gimple_debug_bind_p (stmt
))
5621 location_t sloc
= curr_insn_location ();
5622 gimple_stmt_iterator nsi
= gsi
;
5626 tree var
= gimple_debug_bind_get_var (stmt
);
5631 if (TREE_CODE (var
) != DEBUG_EXPR_DECL
5632 && TREE_CODE (var
) != LABEL_DECL
5633 && !target_for_debug_bind (var
))
5634 goto delink_debug_stmt
;
5636 if (gimple_debug_bind_has_value_p (stmt
))
5637 value
= gimple_debug_bind_get_value (stmt
);
5641 last
= get_last_insn ();
5643 set_curr_insn_location (gimple_location (stmt
));
5646 mode
= DECL_MODE (var
);
5648 mode
= TYPE_MODE (TREE_TYPE (var
));
5650 val
= gen_rtx_VAR_LOCATION
5651 (mode
, var
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
5653 emit_debug_insn (val
);
5655 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5657 /* We can't dump the insn with a TREE where an RTX
5659 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
5660 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5661 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
5665 /* In order not to generate too many debug temporaries,
5666 we delink all uses of debug statements we already expanded.
5667 Therefore debug statements between definition and real
5668 use of TERed SSA names will continue to use the SSA name,
5669 and not be replaced with debug temps. */
5670 delink_stmt_imm_use (stmt
);
5674 if (gsi_end_p (nsi
))
5676 stmt
= gsi_stmt (nsi
);
5677 if (!gimple_debug_bind_p (stmt
))
5681 set_curr_insn_location (sloc
);
5683 else if (gimple_debug_source_bind_p (stmt
))
5685 location_t sloc
= curr_insn_location ();
5686 tree var
= gimple_debug_source_bind_get_var (stmt
);
5687 tree value
= gimple_debug_source_bind_get_value (stmt
);
5691 last
= get_last_insn ();
5693 set_curr_insn_location (gimple_location (stmt
));
5695 mode
= DECL_MODE (var
);
5697 val
= gen_rtx_VAR_LOCATION (mode
, var
, (rtx
)value
,
5698 VAR_INIT_STATUS_UNINITIALIZED
);
5700 emit_debug_insn (val
);
5702 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5704 /* We can't dump the insn with a TREE where an RTX
5706 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
5707 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5708 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
5711 set_curr_insn_location (sloc
);
5715 gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
);
5717 && gimple_call_tail_p (call_stmt
)
5718 && disable_tail_calls
)
5719 gimple_call_set_tail (call_stmt
, false);
5721 if (call_stmt
&& gimple_call_tail_p (call_stmt
))
5724 new_bb
= expand_gimple_tailcall (bb
, call_stmt
, &can_fallthru
);
5735 def_operand_p def_p
;
5736 def_p
= SINGLE_SSA_DEF_OPERAND (stmt
, SSA_OP_DEF
);
5740 /* Ignore this stmt if it is in the list of
5741 replaceable expressions. */
5743 && bitmap_bit_p (SA
.values
,
5744 SSA_NAME_VERSION (DEF_FROM_PTR (def_p
))))
5747 last
= expand_gimple_stmt (stmt
);
5748 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5753 currently_expanding_gimple_stmt
= NULL
;
5755 /* Expand implicit goto and convert goto_locus. */
5756 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5758 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
5759 set_curr_insn_location (e
->goto_locus
);
5760 if ((e
->flags
& EDGE_FALLTHRU
) && e
->dest
!= bb
->next_bb
)
5762 emit_jump (label_rtx_for_bb (e
->dest
));
5763 e
->flags
&= ~EDGE_FALLTHRU
;
  /* Expanded RTL can create a jump in the last instruction of a block.
     This might later be assumed to be a jump to the successor and break
     edge insertion.  We need to insert a dummy move to prevent this.
     PR41440.  */
5770 if (single_succ_p (bb
)
5771 && (single_succ_edge (bb
)->flags
& EDGE_FALLTHRU
)
5772 && (last
= get_last_insn ())
5775 rtx dummy
= gen_reg_rtx (SImode
);
5776 emit_insn_after_noloc (gen_move_insn (dummy
, dummy
), last
, NULL
);
5779 do_pending_stack_adjust ();
5781 /* Find the block tail. The last insn in the block is the insn
5782 before a barrier and/or table jump insn. */
5783 last
= get_last_insn ();
5784 if (BARRIER_P (last
))
5785 last
= PREV_INSN (last
);
5786 if (JUMP_TABLE_DATA_P (last
))
5787 last
= PREV_INSN (PREV_INSN (last
));
5790 update_bb_for_insn (bb
);
5796 /* Create a basic block for initialization code. */
5799 construct_init_block (void)
5801 basic_block init_block
, first_block
;
5805 /* Multiple entry points not supported yet. */
5806 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
) == 1);
5807 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5808 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun
));
5809 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->flags
|= BB_RTL
;
5810 EXIT_BLOCK_PTR_FOR_FN (cfun
)->flags
|= BB_RTL
;
5812 e
= EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun
), 0);
  /* When the entry edge points to the first basic block, we don't need a
     jump; otherwise we have to jump to the proper target.  */
5816 if (e
&& e
->dest
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
)
5818 tree label
= gimple_block_label (e
->dest
);
5820 emit_jump (jump_target_rtx (label
));
5824 flags
= EDGE_FALLTHRU
;
5826 init_block
= create_basic_block (NEXT_INSN (get_insns ()),
5828 ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5829 init_block
->frequency
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->frequency
;
5830 init_block
->count
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
5831 add_bb_to_loop (init_block
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->loop_father
);
5834 first_block
= e
->dest
;
5835 redirect_edge_succ (e
, init_block
);
5836 e
= make_edge (init_block
, first_block
, flags
);
5839 e
= make_edge (init_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FALLTHRU
);
5840 e
->probability
= REG_BR_PROB_BASE
;
5841 e
->count
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
5843 update_bb_for_insn (init_block
);
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
5861 /* Create a block containing landing pads and similar stuff. */
5864 construct_exit_block (void)
5866 rtx_insn
*head
= get_last_insn ();
5868 basic_block exit_block
;
5872 basic_block prev_bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
5873 rtx_insn
*orig_end
= BB_END (prev_bb
);
5875 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
));
5877 /* Make sure the locus is set to the end of the function, so that
5878 epilogue line numbers and warnings are set properly. */
5879 if (LOCATION_LOCUS (cfun
->function_end_locus
) != UNKNOWN_LOCATION
)
5880 input_location
= cfun
->function_end_locus
;
5882 /* Generate rtl for function exit. */
5883 expand_function_end ();
5885 end
= get_last_insn ();
  /* While emitting the function end we could move end of the last basic
     block.  */
  BB_END (prev_bb) = orig_end;
5891 while (NEXT_INSN (head
) && NOTE_P (NEXT_INSN (head
)))
5892 head
= NEXT_INSN (head
);
5893 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5894 bb frequency counting will be confused. Any instructions before that
5895 label are emitted for the case where PREV_BB falls through into the
5896 exit block, so append those instructions to prev_bb in that case. */
5897 if (NEXT_INSN (head
) != return_label
)
5899 while (NEXT_INSN (head
) != return_label
)
5901 if (!NOTE_P (NEXT_INSN (head
)))
5902 BB_END (prev_bb
) = NEXT_INSN (head
);
5903 head
= NEXT_INSN (head
);
5906 exit_block
= create_basic_block (NEXT_INSN (head
), end
, prev_bb
);
5907 exit_block
->frequency
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->frequency
;
5908 exit_block
->count
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
;
5909 add_bb_to_loop (exit_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->loop_father
);
5912 while (ix
< EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
))
5914 e
= EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun
), ix
);
5915 if (!(e
->flags
& EDGE_ABNORMAL
))
5916 redirect_edge_succ (e
, exit_block
);
5921 e
= make_edge (exit_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_FALLTHRU
);
5922 e
->probability
= REG_BR_PROB_BASE
;
5923 e
->count
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
;
5924 FOR_EACH_EDGE (e2
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
5927 e
->count
-= e2
->count
;
5928 exit_block
->count
-= e2
->count
;
5929 exit_block
->frequency
-= EDGE_FREQUENCY (e2
);
5933 if (exit_block
->count
< 0)
5934 exit_block
->count
= 0;
5935 if (exit_block
->frequency
< 0)
5936 exit_block
->frequency
= 0;
5937 update_bb_for_insn (exit_block
);
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
5950 if (IS_TYPE_OR_DECL_P (t
))
5952 else if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
5954 while (((TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
5955 && is_gimple_min_invariant (TREE_OPERAND (t
, 1))
5956 && (!TREE_OPERAND (t
, 2)
5957 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
5958 || (TREE_CODE (t
) == COMPONENT_REF
5959 && (!TREE_OPERAND (t
,2)
5960 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
5961 || TREE_CODE (t
) == BIT_FIELD_REF
5962 || TREE_CODE (t
) == REALPART_EXPR
5963 || TREE_CODE (t
) == IMAGPART_EXPR
5964 || TREE_CODE (t
) == VIEW_CONVERT_EXPR
5965 || CONVERT_EXPR_P (t
))
5966 t
= TREE_OPERAND (t
, 0);
5968 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
5970 t
= get_base_address (t
);
5972 && DECL_MODE (t
) != BLKmode
)
5973 TREE_ADDRESSABLE (t
) = 1;
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple *stmt = gsi_stmt (gsi);
	if (!is_gimple_debug (stmt))
	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}
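
/* For instance, in
     int v[4];
     ... = v[i];
   where i is not a compile-time constant, v must live in memory so that
   the variable index can become an address computation; marking v
   TREE_ADDRESSABLE here keeps the expander from trying to hold such an
   array in a single register.  */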
6002 /* This function sets crtl->args.internal_arg_pointer to a virtual
6003 register if DRAP is needed. Local register allocator will replace
6004 virtual_incoming_args_rtx with the virtual register. */
6007 expand_stack_alignment (void)
6010 unsigned int preferred_stack_boundary
;
6012 if (! SUPPORTS_STACK_ALIGNMENT
)
6015 if (cfun
->calls_alloca
6016 || cfun
->has_nonlocal_label
6017 || crtl
->has_nonlocal_goto
)
6018 crtl
->need_drap
= true;
6020 /* Call update_stack_boundary here again to update incoming stack
6021 boundary. It may set incoming stack alignment to a different
6022 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6023 use the minimum incoming stack alignment to check if it is OK
6024 to perform sibcall optimization since sibcall optimization will
6025 only align the outgoing stack to incoming stack boundary. */
6026 if (targetm
.calls
.update_stack_boundary
)
6027 targetm
.calls
.update_stack_boundary ();
6029 /* The incoming stack frame has to be aligned at least at
6030 parm_stack_boundary. */
6031 gcc_assert (crtl
->parm_stack_boundary
<= INCOMING_STACK_BOUNDARY
);
6033 /* Update crtl->stack_alignment_estimated and use it later to align
6034 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6035 exceptions since callgraph doesn't collect incoming stack alignment
6037 if (cfun
->can_throw_non_call_exceptions
6038 && PREFERRED_STACK_BOUNDARY
> crtl
->preferred_stack_boundary
)
6039 preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
6041 preferred_stack_boundary
= crtl
->preferred_stack_boundary
;
6042 if (preferred_stack_boundary
> crtl
->stack_alignment_estimated
)
6043 crtl
->stack_alignment_estimated
= preferred_stack_boundary
;
6044 if (preferred_stack_boundary
> crtl
->stack_alignment_needed
)
6045 crtl
->stack_alignment_needed
= preferred_stack_boundary
;
6047 gcc_assert (crtl
->stack_alignment_needed
6048 <= crtl
->stack_alignment_estimated
);
6050 crtl
->stack_realign_needed
6051 = INCOMING_STACK_BOUNDARY
< crtl
->stack_alignment_estimated
;
6052 crtl
->stack_realign_tried
= crtl
->stack_realign_needed
;
6054 crtl
->stack_realign_processed
= true;
6056 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6058 gcc_assert (targetm
.calls
.get_drap_rtx
!= NULL
);
6059 drap_rtx
= targetm
.calls
.get_drap_rtx ();
6061 /* stack_realign_drap and drap_rtx must match. */
6062 gcc_assert ((stack_realign_drap
!= 0) == (drap_rtx
!= NULL
));
6064 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6065 if (NULL
!= drap_rtx
)
6067 crtl
->args
.internal_arg_pointer
= drap_rtx
;
6069 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6071 fixup_tail_calls ();
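
  /* On targets that support dynamic stack realignment (e.g. x86), the DRAP
     register obtained above becomes the base for accessing incoming
     arguments once the prologue realigns the stack:
     crtl->args.internal_arg_pointer now points at the DRAP pseudo, and
     later passes replace virtual_incoming_args_rtx accordingly.  */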
static void
expand_main_function (void)
{
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
#endif
}
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

static void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  x = expand_normal (crtl->stack_protect_guard);
  y = expand_normal (guard_decl);

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (targetm.have_stack_protect_set ())
    if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
      {
	emit_insn (insn);
	return;
      }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
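
/* A minimal sketch of what the emitted prologue code amounts to, assuming
   a target without a stack_protect_set pattern (names are illustrative):

     canary_slot = __stack_chk_guard;

   i.e. the guard value is copied into the frame slot that the matching
   epilogue check (stack_protect_epilogue) later compares against before
   returning.  */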
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate the CFG during
   the expansion.  */

const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lva ), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_expand
6153 pass_expand::execute (function
*fun
)
6155 basic_block bb
, init_block
;
6159 rtx_insn
*var_seq
, *var_ret_seq
;
6162 timevar_push (TV_OUT_OF_SSA
);
6163 rewrite_out_of_ssa (&SA
);
6164 timevar_pop (TV_OUT_OF_SSA
);
6165 SA
.partition_to_pseudo
= XCNEWVEC (rtx
, SA
.map
->num_partitions
);
6167 if (MAY_HAVE_DEBUG_STMTS
&& flag_tree_ter
)
6169 gimple_stmt_iterator gsi
;
6170 FOR_EACH_BB_FN (bb
, cfun
)
6171 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6172 if (gimple_debug_bind_p (gsi_stmt (gsi
)))
6173 avoid_deep_ter_for_debug (gsi_stmt (gsi
), 0);
6176 /* Make sure all values used by the optimization passes have sane
6180 /* Some backends want to know that we are expanding to RTL. */
6181 currently_expanding_to_rtl
= 1;
6182 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6183 free_dominance_info (CDI_DOMINATORS
);
6185 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
6187 if (chkp_function_instrumented_p (current_function_decl
))
6188 chkp_reset_rtl_bounds ();
6190 insn_locations_init ();
6191 if (!DECL_IS_BUILTIN (current_function_decl
))
6193 /* Eventually, all FEs should explicitly set function_start_locus. */
6194 if (LOCATION_LOCUS (fun
->function_start_locus
) == UNKNOWN_LOCATION
)
6195 set_curr_insn_location
6196 (DECL_SOURCE_LOCATION (current_function_decl
));
6198 set_curr_insn_location (fun
->function_start_locus
);
6201 set_curr_insn_location (UNKNOWN_LOCATION
);
6202 prologue_location
= curr_insn_location ();
6204 #ifdef INSN_SCHEDULING
6205 init_sched_attrs ();
6208 /* Make sure first insn is a note even if we don't want linenums.
6209 This makes sure the first insn will never be deleted.
6210 Also, final expects a note to appear there. */
6211 emit_note (NOTE_INSN_DELETED
);
6213 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6214 discover_nonconstant_array_refs ();
6216 targetm
.expand_to_rtl_hook ();
6217 crtl
->stack_alignment_needed
= STACK_BOUNDARY
;
6218 crtl
->max_used_stack_slot_alignment
= STACK_BOUNDARY
;
6219 crtl
->stack_alignment_estimated
= 0;
6220 crtl
->preferred_stack_boundary
= STACK_BOUNDARY
;
6221 fun
->cfg
->max_jumptable_ents
= 0;
  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict distance
     of calls.  */
6225 resolve_unique_section (current_function_decl
, 0, flag_function_sections
);
6227 /* Expand the variables recorded during gimple lowering. */
6228 timevar_push (TV_VAR_EXPAND
);
6231 var_ret_seq
= expand_used_vars ();
6233 var_seq
= get_insns ();
6235 timevar_pop (TV_VAR_EXPAND
);
6237 /* Honor stack protection warnings. */
6238 if (warn_stack_protect
)
6240 if (fun
->calls_alloca
)
6241 warning (OPT_Wstack_protector
,
6242 "stack protector not protecting local variables: "
6243 "variable length buffer");
6244 if (has_short_buffer
&& !crtl
->stack_protect_guard
)
6245 warning (OPT_Wstack_protector
,
6246 "stack protector not protecting function: "
6247 "all local arrays are less than %d bytes long",
6248 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE
));
6251 /* Set up parameters and prepare for return, for the function. */
6252 expand_function_start (current_function_decl
);
6254 /* If we emitted any instructions for setting up the variables,
6255 emit them before the FUNCTION_START note. */
6258 emit_insn_before (var_seq
, parm_birth_insn
);
      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just inserted an alloca call.
6262 Adjust the pointer to match. */
6263 parm_birth_insn
= var_seq
;
6266 /* Now propagate the RTL assignment of each partition to the
6267 underlying var of each SSA_NAME. */
6268 for (i
= 1; i
< num_ssa_names
; i
++)
6270 tree name
= ssa_name (i
);
6273 /* We might have generated new SSA names in
6274 update_alias_info_with_stack_vars. They will have a NULL
6275 defining statements, and won't be part of the partitioning,
6277 || !SSA_NAME_DEF_STMT (name
))
6280 adjust_one_expanded_partition_var (name
);
6283 /* Clean up RTL of variables that straddle across multiple
6284 partitions, and check that the rtl of any PARM_DECLs that are not
6285 cleaned up is that of their default defs. */
6286 for (i
= 1; i
< num_ssa_names
; i
++)
6288 tree name
= ssa_name (i
);
6292 /* We might have generated new SSA names in
6293 update_alias_info_with_stack_vars. They will have a NULL
6294 defining statements, and won't be part of the partitioning,
6296 || !SSA_NAME_DEF_STMT (name
))
6298 part
= var_to_partition (SA
.map
, name
);
6299 if (part
== NO_PARTITION
)
6302 /* If this decl was marked as living in multiple places, reset
6303 this now to NULL. */
6304 tree var
= SSA_NAME_VAR (name
);
6305 if (var
&& DECL_RTL_IF_SET (var
) == pc_rtx
)
6306 SET_DECL_RTL (var
, NULL
);
6307 /* Check that the pseudos chosen by assign_parms are those of
6308 the corresponding default defs. */
6309 else if (SSA_NAME_IS_DEFAULT_DEF (name
)
6310 && (TREE_CODE (var
) == PARM_DECL
6311 || TREE_CODE (var
) == RESULT_DECL
))
6313 rtx in
= DECL_RTL_IF_SET (var
);
6315 rtx out
= SA
.partition_to_pseudo
[part
];
6316 gcc_assert (in
== out
);
6318 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6319 those expected by debug backends for each parm and for
6320 the result. This is particularly important for stabs,
6321 whose register elimination from parm's DECL_RTL may cause
6322 -fcompare-debug differences as SET_DECL_RTL changes reg's
6323 attrs. So, make sure the RTL already has the parm as the
6324 EXPR, so that it won't change. */
6325 SET_DECL_RTL (var
, NULL_RTX
);
6327 set_mem_attributes (in
, var
, true);
6328 SET_DECL_RTL (var
, in
);
6332 /* If this function is `main', emit a call to `__main'
6333 to run global initializers, etc. */
6334 if (DECL_NAME (current_function_decl
)
6335 && MAIN_NAME_P (DECL_NAME (current_function_decl
))
6336 && DECL_FILE_SCOPE_P (current_function_decl
))
6337 expand_main_function ();
6339 /* Initialize the stack_protect_guard field. This must happen after the
6340 call to __main (if any) so that the external decl is initialized. */
6341 if (crtl
->stack_protect_guard
)
6342 stack_protect_prologue ();
6344 expand_phi_nodes (&SA
);
6346 /* Release any stale SSA redirection data. */
6347 redirect_edge_var_map_empty ();
6349 /* Register rtl specific functions for cfg. */
6350 rtl_register_cfg_hooks ();
6352 init_block
= construct_init_block ();
6354 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
6355 remaining edges later. */
6356 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR_FOR_FN (fun
)->succs
)
6357 e
->flags
&= ~EDGE_EXECUTABLE
;
6359 lab_rtx_for_bb
= new hash_map
<basic_block
, rtx_code_label
*>;
6360 FOR_BB_BETWEEN (bb
, init_block
->next_bb
, EXIT_BLOCK_PTR_FOR_FN (fun
),
6362 bb
= expand_gimple_basic_block (bb
, var_ret_seq
!= NULL_RTX
);
6364 if (MAY_HAVE_DEBUG_INSNS
)
6365 expand_debug_locations ();
6367 if (deep_ter_debug_map
)
6369 delete deep_ter_debug_map
;
6370 deep_ter_debug_map
= NULL
;
6373 /* Free stuff we no longer need after GIMPLE optimizations. */
6374 free_dominance_info (CDI_DOMINATORS
);
6375 free_dominance_info (CDI_POST_DOMINATORS
);
6376 delete_tree_cfg_annotations (fun
);
6378 timevar_push (TV_OUT_OF_SSA
);
6379 finish_out_of_ssa (&SA
);
6380 timevar_pop (TV_OUT_OF_SSA
);
6382 timevar_push (TV_POST_EXPAND
);
6383 /* We are no longer in SSA form. */
6384 fun
->gimple_df
->in_ssa_p
= false;
6385 loops_state_clear (LOOP_CLOSED_SSA
);
6387 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
6388 conservatively to true until they are all profile aware. */
6389 delete lab_rtx_for_bb
;
6390 free_histograms (fun
);
6392 construct_exit_block ();
6393 insn_locations_finalize ();
6397 rtx_insn
*after
= return_label
;
6398 rtx_insn
*next
= NEXT_INSN (after
);
6399 if (next
&& NOTE_INSN_BASIC_BLOCK_P (next
))
6401 emit_insn_after (var_ret_seq
, after
);
6404 /* Zap the tree EH table. */
6405 set_eh_throw_stmt_table (fun
, NULL
);
6407 /* We need JUMP_LABEL be set in order to redirect jumps, and hence
6408 split edges which edge insertions might do. */
6409 rebuild_jump_labels (get_insns ());
6411 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR_FOR_FN (fun
),
6412 EXIT_BLOCK_PTR_FOR_FN (fun
), next_bb
)
6416 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
6420 rebuild_jump_labels_chain (e
->insns
.r
);
6421 /* Put insns after parm birth, but before
6422 NOTE_INSNS_FUNCTION_BEG. */
6423 if (e
->src
== ENTRY_BLOCK_PTR_FOR_FN (fun
)
6424 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun
)))
6426 rtx_insn
*insns
= e
->insns
.r
;
6428 if (NOTE_P (parm_birth_insn
)
6429 && NOTE_KIND (parm_birth_insn
) == NOTE_INSN_FUNCTION_BEG
)
6430 emit_insn_before_noloc (insns
, parm_birth_insn
, e
->dest
);
6432 emit_insn_after_noloc (insns
, parm_birth_insn
, e
->dest
);
6435 commit_one_edge_insertion (e
);
6442 /* We're done expanding trees to RTL. */
6443 currently_expanding_to_rtl
= 0;
6445 flush_mark_addressable_queue ();
6447 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR_FOR_FN (fun
)->next_bb
,
6448 EXIT_BLOCK_PTR_FOR_FN (fun
), next_bb
)
6452 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
6454 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6455 e
->flags
&= ~EDGE_EXECUTABLE
;
6457 /* At the moment not all abnormal edges match the RTL
6458 representation. It is safe to remove them here as
6459 find_many_sub_basic_blocks will rediscover them.
6460 In the future we should get this fixed properly. */
6461 if ((e
->flags
& EDGE_ABNORMAL
)
6462 && !(e
->flags
& EDGE_SIBCALL
))
6469 blocks
= sbitmap_alloc (last_basic_block_for_fn (fun
));
6470 bitmap_ones (blocks
);
6471 find_many_sub_basic_blocks (blocks
);
6472 sbitmap_free (blocks
);
6473 purge_all_dead_edges ();
6475 expand_stack_alignment ();
6477 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6479 if (crtl
->tail_call_emit
)
6480 fixup_tail_calls ();
6482 /* After initial rtl generation, call back to finish generating
6483 exception support code. We need to do this before cleaning up
6484 the CFG as the code does not expect dead landing pads. */
6485 if (fun
->eh
->region_tree
!= NULL
)
6486 finish_eh_generation ();
6488 /* Remove unreachable blocks, otherwise we cannot compute dominators
6489 which are needed for loop state verification. As a side-effect
6490 this also compacts blocks.
6491 ??? We cannot remove trivially dead insns here as for example
6492 the DRAP reg on i?86 is not magically live at this point.
6493 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6494 cleanup_cfg (CLEANUP_NO_INSN_DEL
);
6496 checking_verify_flow_info ();
6498 /* Initialize pseudos allocated for hard registers. */
6499 emit_initial_value_sets ();
6501 /* And finally unshare all RTL. */
6504 /* There's no need to defer outputting this function any more; we
6505 know we want to output it. */
6506 DECL_DEFER_OUTPUT (current_function_decl
) = 0;
6508 /* Now that we're done expanding trees to RTL, we shouldn't have any
6509 more CONCATs anywhere. */
6510 generating_concat_p
= 0;
6515 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6516 /* And the pass manager will dump RTL for us. */
6519 /* If we're emitting a nested function, make sure its parent gets
6520 emitted as well. Doing otherwise confuses debug info. */
6523 for (parent
= DECL_CONTEXT (current_function_decl
);
6524 parent
!= NULL_TREE
;
6525 parent
= get_containing_scope (parent
))
6526 if (TREE_CODE (parent
) == FUNCTION_DECL
)
6527 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent
)) = 1;
6530 /* We are now committed to emitting code for this function. Do any
6531 preparation, such as emitting abstract debug info for the inline
6532 before it gets mangled by optimization. */
6533 if (cgraph_function_possibly_inlined_p (current_function_decl
))
6534 (*debug_hooks
->outlining_inline_function
) (current_function_decl
);
6536 TREE_ASM_WRITTEN (current_function_decl
) = 1;
6538 /* After expanding, the return labels are no longer needed. */
6539 return_label
= NULL
;
6540 naked_return_label
= NULL
;
6542 /* After expanding, the tm_restart map is no longer needed. */
6543 if (fun
->gimple_df
->tm_restart
)
6544 fun
->gimple_df
->tm_restart
= NULL
;
6546 /* Tag the blocks with a depth number so that change_scope can find
6547 the common parent easily. */
6548 set_block_levels (DECL_INITIAL (fun
->decl
), 0);
6549 default_rtl_profile ();
6551 timevar_pop (TV_POST_EXPAND
);
6559 make_pass_expand (gcc::context
*ctxt
)
6561 return new pass_expand (ctxt
);