/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "ssaexpand.h"
#include "regs.h" /* For reg_renumber.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
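/* For illustration: for a GIMPLE assignment "x_1 = y_2 + z_3" the function
   above rebuilds the tree PLUS_EXPR <y_2, z_3> with the type of x_1, while
   for a single-rhs assignment it simply returns the RHS operand, copying
   it first when the location or block information would be changed.  */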
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */

static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
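/* Worked example (illustrative): with FRAME_GROWS_DOWNWARD, frame_phase
   == 0, frame_offset == -20, size == 12 and align == 16, the code above
   computes new_frame_offset = (-20 - 12) & -16 == -32, so the new object
   lives at offset -32 and frame_offset becomes -32.  */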
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
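/* Note on the pc_rtx marker used above: DECL_RTL of a deferred variable is
   temporarily set to pc_rtx as a sentinel meaning "handled by the stack
   partitioning code"; expand_stack_vars later skips any decl whose rtl is
   no longer pc_rtx, and account_stack_vars resets the marker to NULL.  */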
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];

  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case when
                 -fstrict-aliasing is used.  */
              || (contains_union && flag_strict_aliasing))
            add_stack_var_conflict (i, j);
        }
    }
}
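/* For illustration only (hypothetical user code, not from this file):

     union u { short s; int i; };
     { union u a; a.s = 1;  ...  }
     { union u b; b.i = 2;  ...  }

   If a and b were given the same slot without an explicit conflict,
   type-based alias analysis could treat the store through a.s and the
   store through b.i as non-aliasing and reorder them across the
   temporaries' lifetimes (see PR25654 above), hence the conflict added
   when -fstrict-aliasing is active.  */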
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v
        = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;

          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  bool (*visit)(gimple, tree, void *);

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = (size_t *)
                  pointer_map_contains (decl_to_stack_part, lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi, bj;
              unsigned i, j;

              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
                  add_stack_var_conflict (i, j);
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward, without also moving a dereference
     to it upwards.  But it's conservatively correct as a variable never
     can hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB (bb)
    bb->aux = BITMAP_ALLOC (NULL);

  changed = true;
  while (changed)
    {
      changed = false;
      FOR_EACH_BB (bb)
        {
          bitmap active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB (bb)
    add_scope_conflicts_1 (bb, work, true);

  BITMAP_FREE (work);
  FOR_ALL_BB (bb)
    BITMAP_FREE (bb->aux);
}
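/* For illustration (hypothetical user code):

     { char buf1[64]; use (buf1); }
     { char buf2[64]; use (buf2); }

   The gimplifier adds an end-of-scope clobber for buf1, so buf1 is no
   longer live when buf2 is first mentioned; the two partitions therefore
   do not conflict and may share one 64-byte slot.  */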
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna > alignb)
    return -1;
  if (aligna < alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
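/* Example ordering (illustrative): for stack variables of sizes 8, 32 and
   16 bytes, none with "large" alignment, qsort with this comparator yields
   32, 16, 8; ties on size fall back to alignment and finally to SSA
   version or DECL_UID so the sort is stable across runs.  */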
/* If the points-to solution *PI points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   set.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
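/* Illustrative state change: after union_stack_vars (3, 7),
   stack_vars[7].representative == 3 and 7 is linked into 3's .next chain;
   3's alignb has been raised to 7's if that was larger, and 7's recorded
   conflicts have been re-added against partition 3.  */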
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          Look for the largest non-conflicting object B with size <= S.
          UNION (A, B)
        }  */

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
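/* Illustrative outcome: three non-conflicting locals of sizes 32, 16 and 8
   bytes end up in one partition whose representative is the 32-byte one,
   so all three later share a single (suitably aligned) 32-byte slot
   instead of using 56 bytes of frame.  */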
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
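/* Illustrative arithmetic for the alignment guess above: a variable placed
   at offset -48 from virtual_stack_vars_rtx gives offset & -offset == 16,
   i.e. the slot is known to be at least 16-byte aligned, so DECL_ALIGN
   becomes 128 bits unless that exceeds base_align.  */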
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        expand_one_stack_var_at (stack_vars[j].decl,
                                 base, base_align,
                                 offset);
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY  1
#define SPCT_HAS_SMALL_CHAR_ARRAY  2
#define SPCT_HAS_ARRAY             4
#define SPCT_HAS_AGGREGATE         8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;

  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);

  current_function_decl = node->symbol.decl;

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  current_function_decl = old_cur_fun_decl;
  return size;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }
= last
= get_last_insn ();
1878 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
1879 set_curr_insn_source_location (gimple_location (stmt
));
1880 set_curr_insn_block (gimple_block (stmt
));
1882 /* These flags have no purpose in RTL land. */
1883 true_edge
->flags
&= ~EDGE_TRUE_VALUE
;
1884 false_edge
->flags
&= ~EDGE_FALSE_VALUE
;
1886 /* We can either have a pure conditional jump with one fallthru edge or
1887 two-way jump that needs to be decomposed into two basic blocks. */
1888 if (false_edge
->dest
== bb
->next_bb
)
1890 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
1891 true_edge
->probability
);
1892 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
1893 if (true_edge
->goto_locus
)
1895 set_curr_insn_source_location (true_edge
->goto_locus
);
1896 set_curr_insn_block (true_edge
->goto_block
);
1897 true_edge
->goto_locus
= curr_insn_locator ();
1899 true_edge
->goto_block
= NULL
;
1900 false_edge
->flags
|= EDGE_FALLTHRU
;
1901 maybe_cleanup_end_of_block (false_edge
, last
);
1904 if (true_edge
->dest
== bb
->next_bb
)
1906 jumpifnot_1 (code
, op0
, op1
, label_rtx_for_bb (false_edge
->dest
),
1907 false_edge
->probability
);
1908 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
1909 if (false_edge
->goto_locus
)
1911 set_curr_insn_source_location (false_edge
->goto_locus
);
1912 set_curr_insn_block (false_edge
->goto_block
);
1913 false_edge
->goto_locus
= curr_insn_locator ();
1915 false_edge
->goto_block
= NULL
;
1916 true_edge
->flags
|= EDGE_FALLTHRU
;
1917 maybe_cleanup_end_of_block (true_edge
, last
);
1921 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
1922 true_edge
->probability
);
1923 last
= get_last_insn ();
1924 if (false_edge
->goto_locus
)
1926 set_curr_insn_source_location (false_edge
->goto_locus
);
1927 set_curr_insn_block (false_edge
->goto_block
);
1928 false_edge
->goto_locus
= curr_insn_locator ();
1930 false_edge
->goto_block
= NULL
;
1931 emit_jump (label_rtx_for_bb (false_edge
->dest
));
1934 if (BARRIER_P (BB_END (bb
)))
1935 BB_END (bb
) = PREV_INSN (BB_END (bb
));
1936 update_bb_for_insn (bb
);
1938 new_bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
1939 dest
= false_edge
->dest
;
1940 redirect_edge_succ (false_edge
, new_bb
);
1941 false_edge
->flags
|= EDGE_FALLTHRU
;
1942 new_bb
->count
= false_edge
->count
;
1943 new_bb
->frequency
= EDGE_FREQUENCY (false_edge
);
1944 if (current_loops
&& bb
->loop_father
)
1945 add_bb_to_loop (new_bb
, bb
->loop_father
);
1946 new_edge
= make_edge (new_bb
, dest
, 0);
1947 new_edge
->probability
= REG_BR_PROB_BASE
;
1948 new_edge
->count
= new_bb
->count
;
1949 if (BARRIER_P (BB_END (new_bb
)))
1950 BB_END (new_bb
) = PREV_INSN (BB_END (new_bb
));
1951 update_bb_for_insn (new_bb
);
1953 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
1955 if (true_edge
->goto_locus
)
1957 set_curr_insn_source_location (true_edge
->goto_locus
);
1958 set_curr_insn_block (true_edge
->goto_block
);
1959 true_edge
->goto_locus
= curr_insn_locator ();
1961 true_edge
->goto_block
= NULL
;
/* Mark all calls that can have a transaction restart.  */

static void
mark_transaction_restart_calls (gimple stmt)
{
  struct tm_restart_node dummy;
  void **slot;

  if (!cfun->gimple_df->tm_restart)
    return;

  dummy.stmt = stmt;
  slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
  if (slot)
    {
      struct tm_restart_node *n = (struct tm_restart_node *) *slot;
      tree list = n->label_or_list;
      rtx insn;

      for (insn = next_real_insn (get_last_insn ());
           !CALL_P (insn);
           insn = next_real_insn (insn))
        continue;

      if (TREE_CODE (list) == LABEL_DECL)
        add_reg_note (insn, REG_TM, label_rtx (list));
      else
        for (; list ; list = TREE_CHAIN (list))
          add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    }
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp, decl, lhs;
  bool builtin_p;
  size_t i;

  if (gimple_call_internal_p (stmt))
    {
      expand_internal_call (stmt);
      return;
    }

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  /* If this is not a builtin function, the function type through which the
     call is made may be different from the type of the function.  */
  if (!builtin_p)
    CALL_EXPR_FN (exp)
      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
		      CALL_EXPR_FN (exp));

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER addresses into arguments of builtin functions so we have a
	 chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
	  && TREE_CODE (arg) == SSA_NAME
	  && (def = get_gimple_for_ssa_name (arg))
	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
	arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  if (decl
      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
  else
    CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

  /* Ensure RTL is created for debug args.  */
  if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
    {
      VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
      if (debug_args)
	{
	  unsigned int ix;
	  tree dtemp;

	  for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
	    {
	      gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
	      expand_debug_expr (dtemp);
	    }
	}
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);

  mark_transaction_restart_calls (stmt);
}
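
/* For illustration: given a GIMPLE call such as

     x_1 = foo (a_2, b_3);

   the routine above rebuilds a GENERIC CALL_EXPR roughly equivalent to
   "x = foo (a, b)" -- CALL_EXPR_FN pointing at foo, one CALL_EXPR_ARG per
   gimple call argument, TREE_TYPE set to foo's return type -- and feeds
   it to expand_assignment (or expand_expr_real_1 when there is no LHS),
   so the existing CALL_EXPR expanders for tail calls, the return-slot
   optimization, alloca-for-VLA and va_arg packs are reused unchanged.  */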
2087 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2088 STMT that doesn't require special handling for outgoing edges. That
2089 is no tailcalls and no GIMPLE_COND. */
2092 expand_gimple_stmt_1 (gimple stmt
)
2096 set_curr_insn_source_location (gimple_location (stmt
));
2097 set_curr_insn_block (gimple_block (stmt
));
2099 switch (gimple_code (stmt
))
2102 op0
= gimple_goto_dest (stmt
);
2103 if (TREE_CODE (op0
) == LABEL_DECL
)
2106 expand_computed_goto (op0
);
2109 expand_label (gimple_label_label (stmt
));
2112 case GIMPLE_PREDICT
:
2118 expand_asm_stmt (stmt
);
2121 expand_call_stmt (stmt
);
2125 op0
= gimple_return_retval (stmt
);
2127 if (op0
&& op0
!= error_mark_node
)
2129 tree result
= DECL_RESULT (current_function_decl
);
2131 /* If we are not returning the current function's RESULT_DECL,
2132 build an assignment to it. */
2135 /* I believe that a function's RESULT_DECL is unique. */
2136 gcc_assert (TREE_CODE (op0
) != RESULT_DECL
);
2138 /* ??? We'd like to use simply expand_assignment here,
2139 but this fails if the value is of BLKmode but the return
2140 decl is a register. expand_return has special handling
2141 for this combination, which eventually should move
2142 to common code. See comments there. Until then, let's
2143 build a modify expression :-/ */
2144 op0
= build2 (MODIFY_EXPR
, TREE_TYPE (result
),
2149 expand_null_return ();
2151 expand_return (op0
);
2156 tree lhs
= gimple_assign_lhs (stmt
);
2158 /* Tree expand used to fiddle with |= and &= of two bitfield
2159 COMPONENT_REFs here. This can't happen with gimple, the LHS
2160 of binary assigns must be a gimple reg. */
2162 if (TREE_CODE (lhs
) != SSA_NAME
2163 || get_gimple_rhs_class (gimple_expr_code (stmt
))
2164 == GIMPLE_SINGLE_RHS
)
2166 tree rhs
= gimple_assign_rhs1 (stmt
);
2167 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt
))
2168 == GIMPLE_SINGLE_RHS
);
2169 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (rhs
))
2170 SET_EXPR_LOCATION (rhs
, gimple_location (stmt
));
2171 if (TREE_CLOBBER_P (rhs
))
2172 /* This is a clobber to mark the going out of scope for
2176 expand_assignment (lhs
, rhs
,
2177 gimple_assign_nontemporal_move_p (stmt
));
2182 bool nontemporal
= gimple_assign_nontemporal_move_p (stmt
);
2183 struct separate_ops ops
;
2184 bool promoted
= false;
2186 target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
2187 if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
2190 ops
.code
= gimple_assign_rhs_code (stmt
);
2191 ops
.type
= TREE_TYPE (lhs
);
2192 switch (get_gimple_rhs_class (gimple_expr_code (stmt
)))
2194 case GIMPLE_TERNARY_RHS
:
2195 ops
.op2
= gimple_assign_rhs3 (stmt
);
2197 case GIMPLE_BINARY_RHS
:
2198 ops
.op1
= gimple_assign_rhs2 (stmt
);
2200 case GIMPLE_UNARY_RHS
:
2201 ops
.op0
= gimple_assign_rhs1 (stmt
);
2206 ops
.location
= gimple_location (stmt
);
2208 /* If we want to use a nontemporal store, force the value to
2209 register first. If we store into a promoted register,
2210 don't directly expand to target. */
2211 temp
= nontemporal
|| promoted
? NULL_RTX
: target
;
2212 temp
= expand_expr_real_2 (&ops
, temp
, GET_MODE (target
),
2219 int unsignedp
= SUBREG_PROMOTED_UNSIGNED_P (target
);
2220 /* If TEMP is a VOIDmode constant, use convert_modes to make
2221 sure that we properly convert it. */
2222 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
2224 temp
= convert_modes (GET_MODE (target
),
2225 TYPE_MODE (ops
.type
),
2227 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
2228 GET_MODE (target
), temp
, unsignedp
);
2231 convert_move (SUBREG_REG (target
), temp
, unsignedp
);
2233 else if (nontemporal
&& emit_storent_insn (target
, temp
))
2237 temp
= force_operand (temp
, target
);
2239 emit_move_insn (target
, temp
);
/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx
expand_gimple_stmt (gimple stmt)
{
  location_t saved_location = input_location;
  rtx last = get_last_insn ();
  int lp_nr;

  /* We need to save and restore the current source location so that errors
     discovered during expansion are emitted with the right location.  But
     it would be better if the diagnostic routines used the source location
     embedded in the tree nodes rather than globals.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  expand_gimple_stmt_1 (stmt);

  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && insn_could_throw_p (insn))
	    make_reg_eh_region_note (insn, 0, lp_nr);
	}
    }

  return last;
}
2301 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2302 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2303 generated a tail call (something that might be denied by the ABI
2304 rules governing the call; see calls.c).
2306 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2307 can still reach the rest of BB. The case here is __builtin_sqrt,
2308 where the NaN result goes through the external function (with a
2309 tailcall) and the normal result happens via a sqrt instruction. */
2312 expand_gimple_tailcall (basic_block bb
, gimple stmt
, bool *can_fallthru
)
2320 last2
= last
= expand_gimple_stmt (stmt
);
2322 for (last
= NEXT_INSN (last
); last
; last
= NEXT_INSN (last
))
2323 if (CALL_P (last
) && SIBLING_CALL_P (last
))
2326 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
2328 *can_fallthru
= true;
2332 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2333 Any instructions emitted here are about to be deleted. */
2334 do_pending_stack_adjust ();
2336 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2337 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2338 EH or abnormal edges, we shouldn't have created a tail call in
2339 the first place. So it seems to me we should just be removing
2340 all edges here, or redirecting the existing fallthru edge to
2346 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
2348 if (!(e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)))
2350 if (e
->dest
!= EXIT_BLOCK_PTR
)
2352 e
->dest
->count
-= e
->count
;
2353 e
->dest
->frequency
-= EDGE_FREQUENCY (e
);
2354 if (e
->dest
->count
< 0)
2356 if (e
->dest
->frequency
< 0)
2357 e
->dest
->frequency
= 0;
2360 probability
+= e
->probability
;
2367 /* This is somewhat ugly: the call_expr expander often emits instructions
2368 after the sibcall (to perform the function return). These confuse the
2369 find_many_sub_basic_blocks code, so we need to get rid of these. */
2370 last
= NEXT_INSN (last
);
2371 gcc_assert (BARRIER_P (last
));
2373 *can_fallthru
= false;
2374 while (NEXT_INSN (last
))
2376 /* For instance an sqrt builtin expander expands if with
2377 sibcall in the then and label for `else`. */
2378 if (LABEL_P (NEXT_INSN (last
)))
2380 *can_fallthru
= true;
2383 delete_insn (NEXT_INSN (last
));
2386 e
= make_edge (bb
, EXIT_BLOCK_PTR
, EDGE_ABNORMAL
| EDGE_SIBCALL
);
2387 e
->probability
+= probability
;
2390 update_bb_for_insn (bb
);
2392 if (NEXT_INSN (last
))
2394 bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
2397 if (BARRIER_P (last
))
2398 BB_END (bb
) = PREV_INSN (last
);
2401 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
	? (op1 / mod > 0 ? 1 : -1)
	: 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
		       gen_rtx_MINUS (mode,
				      gen_rtx_ABS (mode, op1),
				      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
		       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
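
/* A worked example of the adjustments above: for the signed division
   -7 / 2 the truncated quotient is -3 with remainder MOD = -1, so with
   OP1 = 2 we have OP1 / MOD = -2.

     floor:  MOD != 0 and OP1 / MOD < 0        -> adjust -1, -3 + -1 = -4
     ceil:   MOD != 0 but OP1 / MOD > 0 fails  -> adjust  0, -3 +  0 = -3
     round:  |MOD| >= |OP1| - |MOD| (1 >= 1)
	     and OP1 / MOD > 0 fails           -> adjust -1, -3 + -1 = -4

   matching floor (-3.5) = -4, ceil (-3.5) = -3 and round-half-away
   (-3.5) = -4.  The helpers emit these adjustments as IF_THEN_ELSE
   rtxes, which is enough for the debug expressions they feed.  */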
2485 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2489 convert_debug_memory_address (enum machine_mode mode
, rtx x
,
2492 enum machine_mode xmode
= GET_MODE (x
);
2494 #ifndef POINTERS_EXTEND_UNSIGNED
2495 gcc_assert (mode
== Pmode
2496 || mode
== targetm
.addr_space
.address_mode (as
));
2497 gcc_assert (xmode
== mode
|| xmode
== VOIDmode
);
2501 gcc_assert (targetm
.addr_space
.valid_pointer_mode (mode
, as
));
2503 if (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
)
2506 if (GET_MODE_PRECISION (mode
) < GET_MODE_PRECISION (xmode
))
2507 x
= simplify_gen_subreg (mode
, x
, xmode
,
2508 subreg_lowpart_offset
2510 else if (POINTERS_EXTEND_UNSIGNED
> 0)
2511 x
= gen_rtx_ZERO_EXTEND (mode
, x
);
2512 else if (!POINTERS_EXTEND_UNSIGNED
)
2513 x
= gen_rtx_SIGN_EXTEND (mode
, x
);
2516 switch (GET_CODE (x
))
2519 if ((SUBREG_PROMOTED_VAR_P (x
)
2520 || (REG_P (SUBREG_REG (x
)) && REG_POINTER (SUBREG_REG (x
)))
2521 || (GET_CODE (SUBREG_REG (x
)) == PLUS
2522 && REG_P (XEXP (SUBREG_REG (x
), 0))
2523 && REG_POINTER (XEXP (SUBREG_REG (x
), 0))
2524 && CONST_INT_P (XEXP (SUBREG_REG (x
), 1))))
2525 && GET_MODE (SUBREG_REG (x
)) == mode
)
2526 return SUBREG_REG (x
);
2529 temp
= gen_rtx_LABEL_REF (mode
, XEXP (x
, 0));
2530 LABEL_REF_NONLOCAL_P (temp
) = LABEL_REF_NONLOCAL_P (x
);
2533 temp
= shallow_copy_rtx (x
);
2534 PUT_MODE (temp
, mode
);
2537 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
2539 temp
= gen_rtx_CONST (mode
, temp
);
2543 if (CONST_INT_P (XEXP (x
, 1)))
2545 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
2547 return gen_rtx_fmt_ee (GET_CODE (x
), mode
, temp
, XEXP (x
, 1));
2553 /* Don't know how to express ptr_extend as operation in debug info. */
2556 #endif /* POINTERS_EXTEND_UNSIGNED */
/* Return an RTX equivalent to the value of the parameter DECL.  */

static rtx
expand_debug_parm_decl (tree decl)
{
  rtx incoming = DECL_INCOMING_RTL (decl);

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
	  || (MEM_P (incoming)
	      && REG_P (XEXP (incoming, 0))
	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
    {
      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));

#ifdef HAVE_window_save
      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
	 If the target machine has an explicit window save instruction, the
	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
      if (REG_P (incoming)
	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
	incoming
	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
				OUTGOING_REGNO (REGNO (incoming)), 0);
      else if (MEM_P (incoming))
	{
	  rtx reg = XEXP (incoming, 0);
	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
	    {
	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
	      incoming = replace_equiv_address_nv (incoming, reg);
	    }
	}
#endif

      ENTRY_VALUE_EXP (rtl) = incoming;
      return rtl;
    }

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && !TREE_ADDRESSABLE (decl)
      && MEM_P (incoming)
      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    return incoming;

  return NULL_RTX;
}
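
/* For illustration: for a parameter that arrives in a hard register the
   result of the routine above is roughly

     (entry_value:SI (reg:SI <incoming hard reg>))

   letting debug info refer to the value the register held on entry even
   after it has been reused; a stack-passed parameter whose slot is based
   on virtual_incoming_args_rtx simply reuses DECL_INCOMING_RTL, and
   anything else yields NULL_RTX.  */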
2614 /* Return an RTX equivalent to the value of the tree expression EXP. */
2617 expand_debug_expr (tree exp
)
2619 rtx op0
= NULL_RTX
, op1
= NULL_RTX
, op2
= NULL_RTX
;
2620 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
2621 enum machine_mode inner_mode
= VOIDmode
;
2622 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
2625 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
2627 case tcc_expression
:
2628 switch (TREE_CODE (exp
))
2632 case WIDEN_MULT_PLUS_EXPR
:
2633 case WIDEN_MULT_MINUS_EXPR
:
2637 case TRUTH_ANDIF_EXPR
:
2638 case TRUTH_ORIF_EXPR
:
2639 case TRUTH_AND_EXPR
:
2641 case TRUTH_XOR_EXPR
:
2644 case TRUTH_NOT_EXPR
:
2653 op2
= expand_debug_expr (TREE_OPERAND (exp
, 2));
2660 case tcc_comparison
:
2661 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
2668 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
2669 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
2679 case tcc_exceptional
:
2680 case tcc_declaration
:
2686 switch (TREE_CODE (exp
))
2689 if (!lookup_constant_def (exp
))
2691 if (strlen (TREE_STRING_POINTER (exp
)) + 1
2692 != (size_t) TREE_STRING_LENGTH (exp
))
2694 op0
= gen_rtx_CONST_STRING (Pmode
, TREE_STRING_POINTER (exp
));
2695 op0
= gen_rtx_MEM (BLKmode
, op0
);
2696 set_mem_attributes (op0
, exp
, 0);
2699 /* Fall through... */
2704 op0
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_INITIALIZER
);
2708 gcc_assert (COMPLEX_MODE_P (mode
));
2709 op0
= expand_debug_expr (TREE_REALPART (exp
));
2710 op1
= expand_debug_expr (TREE_IMAGPART (exp
));
2711 return gen_rtx_CONCAT (mode
, op0
, op1
);
2713 case DEBUG_EXPR_DECL
:
2714 op0
= DECL_RTL_IF_SET (exp
);
2719 op0
= gen_rtx_DEBUG_EXPR (mode
);
2720 DEBUG_EXPR_TREE_DECL (op0
) = exp
;
2721 SET_DECL_RTL (exp
, op0
);
2731 op0
= DECL_RTL_IF_SET (exp
);
2733 /* This decl was probably optimized away. */
2736 if (TREE_CODE (exp
) != VAR_DECL
2737 || DECL_EXTERNAL (exp
)
2738 || !TREE_STATIC (exp
)
2740 || DECL_HARD_REGISTER (exp
)
2741 || DECL_IN_CONSTANT_POOL (exp
)
2742 || mode
== VOIDmode
)
2745 op0
= make_decl_rtl_for_debug (exp
);
2747 || GET_CODE (XEXP (op0
, 0)) != SYMBOL_REF
2748 || SYMBOL_REF_DECL (XEXP (op0
, 0)) != exp
)
2752 op0
= copy_rtx (op0
);
2754 if (GET_MODE (op0
) == BLKmode
2755 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2756 below would ICE. While it is likely a FE bug,
2757 try to be robust here. See PR43166. */
2759 || (mode
== VOIDmode
&& GET_MODE (op0
) != VOIDmode
))
2761 gcc_assert (MEM_P (op0
));
2762 op0
= adjust_address_nv (op0
, mode
, 0);
2773 inner_mode
= GET_MODE (op0
);
2775 if (mode
== inner_mode
)
2778 if (inner_mode
== VOIDmode
)
2780 if (TREE_CODE (exp
) == SSA_NAME
)
2781 inner_mode
= TYPE_MODE (TREE_TYPE (exp
));
2783 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
2784 if (mode
== inner_mode
)
2788 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
2790 if (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (inner_mode
))
2791 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
2792 else if (GET_MODE_BITSIZE (mode
) < GET_MODE_BITSIZE (inner_mode
))
2793 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
2795 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
2797 else if (FLOAT_MODE_P (mode
))
2799 gcc_assert (TREE_CODE (exp
) != SSA_NAME
);
2800 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
2801 op0
= simplify_gen_unary (UNSIGNED_FLOAT
, mode
, op0
, inner_mode
);
2803 op0
= simplify_gen_unary (FLOAT
, mode
, op0
, inner_mode
);
2805 else if (FLOAT_MODE_P (inner_mode
))
2808 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
2810 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
2812 else if (CONSTANT_P (op0
)
2813 || GET_MODE_PRECISION (mode
) <= GET_MODE_PRECISION (inner_mode
))
2814 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
2815 subreg_lowpart_offset (mode
,
2817 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == tcc_unary
2818 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)))
2820 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
2822 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
2828 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
2830 tree newexp
= fold_binary (MEM_REF
, TREE_TYPE (exp
),
2831 TREE_OPERAND (exp
, 0),
2832 TREE_OPERAND (exp
, 1));
2834 return expand_debug_expr (newexp
);
2838 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
2839 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
2843 if (TREE_CODE (exp
) == MEM_REF
)
2845 if (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
2846 || (GET_CODE (op0
) == PLUS
2847 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
))
2848 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2849 Instead just use get_inner_reference. */
2852 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
2853 if (!op1
|| !CONST_INT_P (op1
))
2856 op0
= plus_constant (inner_mode
, op0
, INTVAL (op1
));
2859 if (POINTER_TYPE_P (TREE_TYPE (exp
)))
2860 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
2862 as
= ADDR_SPACE_GENERIC
;
2864 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
2866 if (op0
== NULL_RTX
)
2869 op0
= gen_rtx_MEM (mode
, op0
);
2870 set_mem_attributes (op0
, exp
, 0);
2871 if (TREE_CODE (exp
) == MEM_REF
2872 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
2873 set_mem_expr (op0
, NULL_TREE
);
2874 set_mem_addr_space (op0
, as
);
2878 case TARGET_MEM_REF
:
2879 if (TREE_CODE (TMR_BASE (exp
)) == ADDR_EXPR
2880 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp
), 0)))
2883 op0
= expand_debug_expr
2884 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp
)), exp
));
2888 if (POINTER_TYPE_P (TREE_TYPE (exp
)))
2889 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
2891 as
= ADDR_SPACE_GENERIC
;
2893 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
2895 if (op0
== NULL_RTX
)
2898 op0
= gen_rtx_MEM (mode
, op0
);
2900 set_mem_attributes (op0
, exp
, 0);
2901 set_mem_addr_space (op0
, as
);
2907 case ARRAY_RANGE_REF
:
2912 case VIEW_CONVERT_EXPR
:
2914 enum machine_mode mode1
;
2915 HOST_WIDE_INT bitsize
, bitpos
;
2918 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
2919 &mode1
, &unsignedp
, &volatilep
, false);
2925 orig_op0
= op0
= expand_debug_expr (tem
);
2932 enum machine_mode addrmode
, offmode
;
2937 op0
= XEXP (op0
, 0);
2938 addrmode
= GET_MODE (op0
);
2939 if (addrmode
== VOIDmode
)
2942 op1
= expand_debug_expr (offset
);
2946 offmode
= GET_MODE (op1
);
2947 if (offmode
== VOIDmode
)
2948 offmode
= TYPE_MODE (TREE_TYPE (offset
));
2950 if (addrmode
!= offmode
)
2951 op1
= simplify_gen_subreg (addrmode
, op1
, offmode
,
2952 subreg_lowpart_offset (addrmode
,
2955 /* Don't use offset_address here, we don't need a
2956 recognizable address, and we don't want to generate
2958 op0
= gen_rtx_MEM (mode
, simplify_gen_binary (PLUS
, addrmode
,
2964 if (mode1
== VOIDmode
)
2966 mode1
= smallest_mode_for_size (bitsize
, MODE_INT
);
2967 if (bitpos
>= BITS_PER_UNIT
)
2969 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
2970 bitpos
%= BITS_PER_UNIT
;
2972 else if (bitpos
< 0)
2975 = (-bitpos
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
;
2976 op0
= adjust_address_nv (op0
, mode1
, units
);
2977 bitpos
+= units
* BITS_PER_UNIT
;
2979 else if (bitpos
== 0 && bitsize
== GET_MODE_BITSIZE (mode
))
2980 op0
= adjust_address_nv (op0
, mode
, 0);
2981 else if (GET_MODE (op0
) != mode1
)
2982 op0
= adjust_address_nv (op0
, mode1
, 0);
2984 op0
= copy_rtx (op0
);
2985 if (op0
== orig_op0
)
2986 op0
= shallow_copy_rtx (op0
);
2987 set_mem_attributes (op0
, exp
, 0);
2990 if (bitpos
== 0 && mode
== GET_MODE (op0
))
2996 if (GET_MODE (op0
) == BLKmode
)
2999 if ((bitpos
% BITS_PER_UNIT
) == 0
3000 && bitsize
== GET_MODE_BITSIZE (mode1
))
3002 enum machine_mode opmode
= GET_MODE (op0
);
3004 if (opmode
== VOIDmode
)
3005 opmode
= TYPE_MODE (TREE_TYPE (tem
));
3007 /* This condition may hold if we're expanding the address
3008 right past the end of an array that turned out not to
3009 be addressable (i.e., the address was only computed in
3010 debug stmts). The gen_subreg below would rightfully
3011 crash, and the address doesn't really exist, so just
3013 if (bitpos
>= GET_MODE_BITSIZE (opmode
))
3016 if ((bitpos
% GET_MODE_BITSIZE (mode
)) == 0)
3017 return simplify_gen_subreg (mode
, op0
, opmode
,
3018 bitpos
/ BITS_PER_UNIT
);
3021 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0
))
3022 && TYPE_UNSIGNED (TREE_TYPE (exp
))
3024 : ZERO_EXTRACT
, mode
,
3025 GET_MODE (op0
) != VOIDmode
3027 : TYPE_MODE (TREE_TYPE (tem
)),
3028 op0
, GEN_INT (bitsize
), GEN_INT (bitpos
));
3032 return simplify_gen_unary (ABS
, mode
, op0
, mode
);
3035 return simplify_gen_unary (NEG
, mode
, op0
, mode
);
3038 return simplify_gen_unary (NOT
, mode
, op0
, mode
);
3041 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
3043 ? UNSIGNED_FLOAT
: FLOAT
, mode
, op0
,
3046 case FIX_TRUNC_EXPR
:
3047 return simplify_gen_unary (unsignedp
? UNSIGNED_FIX
: FIX
, mode
, op0
,
3050 case POINTER_PLUS_EXPR
:
3051 /* For the rare target where pointers are not the same size as
3052 size_t, we need to check for mis-matched modes and correct
3055 && GET_MODE (op0
) != VOIDmode
&& GET_MODE (op1
) != VOIDmode
3056 && GET_MODE (op0
) != GET_MODE (op1
))
3058 if (GET_MODE_BITSIZE (GET_MODE (op0
)) < GET_MODE_BITSIZE (GET_MODE (op1
)))
3059 op1
= simplify_gen_unary (TRUNCATE
, GET_MODE (op0
), op1
,
3062 /* We always sign-extend, regardless of the signedness of
3063 the operand, because the operand is always unsigned
3064 here even if the original C expression is signed. */
3065 op1
= simplify_gen_unary (SIGN_EXTEND
, GET_MODE (op0
), op1
,
3070 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
3073 return simplify_gen_binary (MINUS
, mode
, op0
, op1
);
3076 return simplify_gen_binary (MULT
, mode
, op0
, op1
);
3079 case TRUNC_DIV_EXPR
:
3080 case EXACT_DIV_EXPR
:
3082 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
3084 return simplify_gen_binary (DIV
, mode
, op0
, op1
);
3086 case TRUNC_MOD_EXPR
:
3087 return simplify_gen_binary (unsignedp
? UMOD
: MOD
, mode
, op0
, op1
);
3089 case FLOOR_DIV_EXPR
:
3091 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
3094 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
3095 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3096 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
3097 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3100 case FLOOR_MOD_EXPR
:
3102 return simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3105 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3106 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
3107 adj
= simplify_gen_unary (NEG
, mode
,
3108 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3110 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3116 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
3117 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3118 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
3119 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3123 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
3124 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3125 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
3126 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3132 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3133 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
3134 adj
= simplify_gen_unary (NEG
, mode
,
3135 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3137 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3141 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3142 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
3143 adj
= simplify_gen_unary (NEG
, mode
,
3144 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3146 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3149 case ROUND_DIV_EXPR
:
3152 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
3153 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3154 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
3155 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3159 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
3160 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3161 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
3162 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
3165 case ROUND_MOD_EXPR
:
3168 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
3169 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
3170 adj
= simplify_gen_unary (NEG
, mode
,
3171 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3173 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3177 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
3178 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
3179 adj
= simplify_gen_unary (NEG
, mode
,
3180 simplify_gen_binary (MULT
, mode
, adj
, op1
),
3182 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
3186 return simplify_gen_binary (ASHIFT
, mode
, op0
, op1
);
3190 return simplify_gen_binary (LSHIFTRT
, mode
, op0
, op1
);
3192 return simplify_gen_binary (ASHIFTRT
, mode
, op0
, op1
);
3195 return simplify_gen_binary (ROTATE
, mode
, op0
, op1
);
3198 return simplify_gen_binary (ROTATERT
, mode
, op0
, op1
);
3201 return simplify_gen_binary (unsignedp
? UMIN
: SMIN
, mode
, op0
, op1
);
3204 return simplify_gen_binary (unsignedp
? UMAX
: SMAX
, mode
, op0
, op1
);
3207 case TRUTH_AND_EXPR
:
3208 return simplify_gen_binary (AND
, mode
, op0
, op1
);
3212 return simplify_gen_binary (IOR
, mode
, op0
, op1
);
3215 case TRUTH_XOR_EXPR
:
3216 return simplify_gen_binary (XOR
, mode
, op0
, op1
);
3218 case TRUTH_ANDIF_EXPR
:
3219 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, const0_rtx
);
3221 case TRUTH_ORIF_EXPR
:
3222 return gen_rtx_IF_THEN_ELSE (mode
, op0
, const_true_rtx
, op1
);
3224 case TRUTH_NOT_EXPR
:
3225 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, const0_rtx
);
3228 return simplify_gen_relational (unsignedp
? LTU
: LT
, mode
, inner_mode
,
3232 return simplify_gen_relational (unsignedp
? LEU
: LE
, mode
, inner_mode
,
3236 return simplify_gen_relational (unsignedp
? GTU
: GT
, mode
, inner_mode
,
3240 return simplify_gen_relational (unsignedp
? GEU
: GE
, mode
, inner_mode
,
3244 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, op1
);
3247 return simplify_gen_relational (NE
, mode
, inner_mode
, op0
, op1
);
3249 case UNORDERED_EXPR
:
3250 return simplify_gen_relational (UNORDERED
, mode
, inner_mode
, op0
, op1
);
3253 return simplify_gen_relational (ORDERED
, mode
, inner_mode
, op0
, op1
);
3256 return simplify_gen_relational (UNLT
, mode
, inner_mode
, op0
, op1
);
3259 return simplify_gen_relational (UNLE
, mode
, inner_mode
, op0
, op1
);
3262 return simplify_gen_relational (UNGT
, mode
, inner_mode
, op0
, op1
);
3265 return simplify_gen_relational (UNGE
, mode
, inner_mode
, op0
, op1
);
3268 return simplify_gen_relational (UNEQ
, mode
, inner_mode
, op0
, op1
);
3271 return simplify_gen_relational (LTGT
, mode
, inner_mode
, op0
, op1
);
3274 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, op2
);
3277 gcc_assert (COMPLEX_MODE_P (mode
));
3278 if (GET_MODE (op0
) == VOIDmode
)
3279 op0
= gen_rtx_CONST (GET_MODE_INNER (mode
), op0
);
3280 if (GET_MODE (op1
) == VOIDmode
)
3281 op1
= gen_rtx_CONST (GET_MODE_INNER (mode
), op1
);
3282 return gen_rtx_CONCAT (mode
, op0
, op1
);
3285 if (GET_CODE (op0
) == CONCAT
)
3286 return gen_rtx_CONCAT (mode
, XEXP (op0
, 0),
3287 simplify_gen_unary (NEG
, GET_MODE_INNER (mode
),
3289 GET_MODE_INNER (mode
)));
3292 enum machine_mode imode
= GET_MODE_INNER (mode
);
3297 re
= adjust_address_nv (op0
, imode
, 0);
3298 im
= adjust_address_nv (op0
, imode
, GET_MODE_SIZE (imode
));
3302 enum machine_mode ifmode
= int_mode_for_mode (mode
);
3303 enum machine_mode ihmode
= int_mode_for_mode (imode
);
3305 if (ifmode
== BLKmode
|| ihmode
== BLKmode
)
3307 halfsize
= GEN_INT (GET_MODE_BITSIZE (ihmode
));
3310 re
= gen_rtx_SUBREG (ifmode
, re
, 0);
3311 re
= gen_rtx_ZERO_EXTRACT (ihmode
, re
, halfsize
, const0_rtx
);
3312 if (imode
!= ihmode
)
3313 re
= gen_rtx_SUBREG (imode
, re
, 0);
3314 im
= copy_rtx (op0
);
3316 im
= gen_rtx_SUBREG (ifmode
, im
, 0);
3317 im
= gen_rtx_ZERO_EXTRACT (ihmode
, im
, halfsize
, halfsize
);
3318 if (imode
!= ihmode
)
3319 im
= gen_rtx_SUBREG (imode
, im
, 0);
3321 im
= gen_rtx_NEG (imode
, im
);
3322 return gen_rtx_CONCAT (mode
, re
, im
);
3326 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
3327 if (!op0
|| !MEM_P (op0
))
3329 if ((TREE_CODE (TREE_OPERAND (exp
, 0)) == VAR_DECL
3330 || TREE_CODE (TREE_OPERAND (exp
, 0)) == PARM_DECL
3331 || TREE_CODE (TREE_OPERAND (exp
, 0)) == RESULT_DECL
)
3332 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp
, 0))
3333 || target_for_debug_bind (TREE_OPERAND (exp
, 0))))
3334 return gen_rtx_DEBUG_IMPLICIT_PTR (mode
, TREE_OPERAND (exp
, 0));
3336 if (handled_component_p (TREE_OPERAND (exp
, 0)))
3338 HOST_WIDE_INT bitoffset
, bitsize
, maxsize
;
3340 = get_ref_base_and_extent (TREE_OPERAND (exp
, 0),
3341 &bitoffset
, &bitsize
, &maxsize
);
3342 if ((TREE_CODE (decl
) == VAR_DECL
3343 || TREE_CODE (decl
) == PARM_DECL
3344 || TREE_CODE (decl
) == RESULT_DECL
)
3345 && (!TREE_ADDRESSABLE (decl
)
3346 || target_for_debug_bind (decl
))
3347 && (bitoffset
% BITS_PER_UNIT
) == 0
3349 && bitsize
== maxsize
)
3351 rtx base
= gen_rtx_DEBUG_IMPLICIT_PTR (mode
, decl
);
3352 return plus_constant (mode
, base
, bitoffset
/ BITS_PER_UNIT
);
3359 as
= TYPE_ADDR_SPACE (TREE_TYPE (exp
));
3360 op0
= convert_debug_memory_address (mode
, XEXP (op0
, 0), as
);
3368 op0
= gen_rtx_CONCATN
3369 (mode
, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
))));
3371 for (i
= 0; i
< VECTOR_CST_NELTS (exp
); ++i
)
3373 op1
= expand_debug_expr (VECTOR_CST_ELT (exp
, i
));
3376 XVECEXP (op0
, 0, i
) = op1
;
3383 if (TREE_CLOBBER_P (exp
))
3385 else if (TREE_CODE (TREE_TYPE (exp
)) == VECTOR_TYPE
)
3390 op0
= gen_rtx_CONCATN
3391 (mode
, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
))));
3393 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), i
, val
)
3395 op1
= expand_debug_expr (val
);
3398 XVECEXP (op0
, 0, i
) = op1
;
3401 if (i
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)))
3403 op1
= expand_debug_expr
3404 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp
))));
3409 for (; i
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)); i
++)
3410 XVECEXP (op0
, 0, i
) = op1
;
3416 goto flag_unsupported
;
3419 /* ??? Maybe handle some builtins? */
3424 gimple g
= get_gimple_for_ssa_name (exp
);
3427 op0
= expand_debug_expr (gimple_assign_rhs_to_tree (g
));
3433 int part
= var_to_partition (SA
.map
, exp
);
3435 if (part
== NO_PARTITION
)
3437 /* If this is a reference to an incoming value of parameter
3438 that is never used in the code or where the incoming
3439 value is never used in the code, use PARM_DECL's
3441 if (SSA_NAME_IS_DEFAULT_DEF (exp
)
3442 && TREE_CODE (SSA_NAME_VAR (exp
)) == PARM_DECL
)
3444 op0
= expand_debug_parm_decl (SSA_NAME_VAR (exp
));
3447 op0
= expand_debug_expr (SSA_NAME_VAR (exp
));
3454 gcc_assert (part
>= 0 && (unsigned)part
< SA
.map
->num_partitions
);
3456 op0
= copy_rtx (SA
.partition_to_pseudo
[part
]);
3464 /* Vector stuff. For most of the codes we don't have rtl codes. */
3465 case REALIGN_LOAD_EXPR
:
3466 case REDUC_MAX_EXPR
:
3467 case REDUC_MIN_EXPR
:
3468 case REDUC_PLUS_EXPR
:
3470 case VEC_LSHIFT_EXPR
:
3471 case VEC_PACK_FIX_TRUNC_EXPR
:
3472 case VEC_PACK_SAT_EXPR
:
3473 case VEC_PACK_TRUNC_EXPR
:
3474 case VEC_RSHIFT_EXPR
:
3475 case VEC_UNPACK_FLOAT_HI_EXPR
:
3476 case VEC_UNPACK_FLOAT_LO_EXPR
:
3477 case VEC_UNPACK_HI_EXPR
:
3478 case VEC_UNPACK_LO_EXPR
:
3479 case VEC_WIDEN_MULT_HI_EXPR
:
3480 case VEC_WIDEN_MULT_LO_EXPR
:
3481 case VEC_WIDEN_LSHIFT_HI_EXPR
:
3482 case VEC_WIDEN_LSHIFT_LO_EXPR
:
3487 case ADDR_SPACE_CONVERT_EXPR
:
3488 case FIXED_CONVERT_EXPR
:
3490 case WITH_SIZE_EXPR
:
3494 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
3495 && SCALAR_INT_MODE_P (mode
))
3498 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
3500 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
3503 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
3505 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op1
,
3507 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
3508 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
3512 case WIDEN_MULT_EXPR
:
3513 case WIDEN_MULT_PLUS_EXPR
:
3514 case WIDEN_MULT_MINUS_EXPR
:
3515 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
3516 && SCALAR_INT_MODE_P (mode
))
3518 inner_mode
= GET_MODE (op0
);
3519 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
3520 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
3522 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
3523 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1))))
3524 op1
= simplify_gen_unary (ZERO_EXTEND
, mode
, op1
, inner_mode
);
3526 op1
= simplify_gen_unary (SIGN_EXTEND
, mode
, op1
, inner_mode
);
3527 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
3528 if (TREE_CODE (exp
) == WIDEN_MULT_EXPR
)
3530 else if (TREE_CODE (exp
) == WIDEN_MULT_PLUS_EXPR
)
3531 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
3533 return simplify_gen_binary (MINUS
, mode
, op2
, op0
);
3537 case WIDEN_SUM_EXPR
:
3538 case WIDEN_LSHIFT_EXPR
:
3539 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
3540 && SCALAR_INT_MODE_P (mode
))
3543 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
3545 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
3547 return simplify_gen_binary (TREE_CODE (exp
) == WIDEN_LSHIFT_EXPR
3548 ? ASHIFT
: PLUS
, mode
, op0
, op1
);
3553 return simplify_gen_ternary (FMA
, mode
, inner_mode
, op0
, op1
, op2
);
3557 #ifdef ENABLE_CHECKING
3566 /* Return an RTX equivalent to the source bind value of the tree expression
3570 expand_debug_source_expr (tree exp
)
3573 enum machine_mode mode
= VOIDmode
, inner_mode
;
3575 switch (TREE_CODE (exp
))
3579 mode
= DECL_MODE (exp
);
3580 op0
= expand_debug_parm_decl (exp
);
3583 /* See if this isn't an argument that has been completely
3585 if (!DECL_RTL_SET_P (exp
)
3586 && !DECL_INCOMING_RTL (exp
)
3587 && DECL_ABSTRACT_ORIGIN (current_function_decl
))
3590 if (DECL_ABSTRACT_ORIGIN (exp
))
3591 aexp
= DECL_ABSTRACT_ORIGIN (exp
);
3592 if (DECL_CONTEXT (aexp
)
3593 == DECL_ABSTRACT_ORIGIN (current_function_decl
))
3595 VEC(tree
, gc
) **debug_args
;
3598 #ifdef ENABLE_CHECKING
3600 for (parm
= DECL_ARGUMENTS (current_function_decl
);
3601 parm
; parm
= DECL_CHAIN (parm
))
3602 gcc_assert (parm
!= exp
3603 && DECL_ABSTRACT_ORIGIN (parm
) != aexp
);
3605 debug_args
= decl_debug_args_lookup (current_function_decl
);
3606 if (debug_args
!= NULL
)
3608 for (ix
= 0; VEC_iterate (tree
, *debug_args
, ix
, ddecl
);
3611 return gen_rtx_DEBUG_PARAMETER_REF (mode
, aexp
);
3621 if (op0
== NULL_RTX
)
3624 inner_mode
= GET_MODE (op0
);
3625 if (mode
== inner_mode
)
3628 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
3630 if (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (inner_mode
))
3631 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
3632 else if (GET_MODE_BITSIZE (mode
) < GET_MODE_BITSIZE (inner_mode
))
3633 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
3635 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
3637 else if (FLOAT_MODE_P (mode
))
3639 else if (FLOAT_MODE_P (inner_mode
))
3641 if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
3642 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
3644 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
3646 else if (CONSTANT_P (op0
)
3647 || GET_MODE_BITSIZE (mode
) <= GET_MODE_BITSIZE (inner_mode
))
3648 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
3649 subreg_lowpart_offset (mode
, inner_mode
));
3650 else if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
3651 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
3653 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx insn;
  rtx last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though it doesn't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_INSN_P (insn))
      {
	tree value = (tree) INSN_VAR_LOCATION_LOC (insn);
	rtx val;
	enum machine_mode mode;

	if (value == NULL_TREE)
	  val = NULL_RTX;
	else
	  {
	    if (INSN_VAR_LOCATION_STATUS (insn)
		== VAR_INIT_STATUS_UNINITIALIZED)
	      val = expand_debug_source_expr (value);
	    else
	      val = expand_debug_expr (value);
	    gcc_assert (last == get_last_insn ());
	  }

	if (!val)
	  val = gen_rtx_UNKNOWN_VAR_LOC ();
	else
	  {
	    mode = GET_MODE (INSN_VAR_LOCATION (insn));

	    gcc_assert (mode == GET_MODE (val)
			|| (GET_MODE (val) == VOIDmode
			    && (CONST_INT_P (val)
				|| GET_CODE (val) == CONST_FIXED
				|| GET_CODE (val) == CONST_DOUBLE
				|| GET_CODE (val) == LABEL_REF)));
	  }

	INSN_VAR_LOCATION_LOC (insn) = val;
      }

  flag_strict_aliasing = save_strict_alias;
}
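
/* For illustration: before this pass a debug insn still carries the tree
   value of its binding; afterwards INSN_VAR_LOCATION_LOC is real RTL,
   e.g. a (reg ...) or (mem ...) location, or UNKNOWN_VAR_LOC when the
   value cannot be expressed, which is what var-tracking expects to see
   when it builds the final variable location lists.  */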
3713 /* Expand basic block BB from GIMPLE trees to RTL. */
3716 expand_gimple_basic_block (basic_block bb
)
3718 gimple_stmt_iterator gsi
;
3727 fprintf (dump_file
, "\n;; Generating RTL for gimple basic block %d\n",
3730 /* Note that since we are now transitioning from GIMPLE to RTL, we
3731 cannot use the gsi_*_bb() routines because they expect the basic
3732 block to be in GIMPLE, instead of RTL. Therefore, we need to
3733 access the BB sequence directly. */
3734 stmts
= bb_seq (bb
);
3735 bb
->il
.gimple
.seq
= NULL
;
3736 bb
->il
.gimple
.phi_nodes
= NULL
;
3737 rtl_profile_for_bb (bb
);
3738 init_rtl_bb_info (bb
);
3739 bb
->flags
|= BB_RTL
;
3741 /* Remove the RETURN_EXPR if we may fall though to the exit
3743 gsi
= gsi_last (stmts
);
3744 if (!gsi_end_p (gsi
)
3745 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
3747 gimple ret_stmt
= gsi_stmt (gsi
);
3749 gcc_assert (single_succ_p (bb
));
3750 gcc_assert (single_succ (bb
) == EXIT_BLOCK_PTR
);
3752 if (bb
->next_bb
== EXIT_BLOCK_PTR
3753 && !gimple_return_retval (ret_stmt
))
3755 gsi_remove (&gsi
, false);
3756 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
3760 gsi
= gsi_start (stmts
);
3761 if (!gsi_end_p (gsi
))
3763 stmt
= gsi_stmt (gsi
);
3764 if (gimple_code (stmt
) != GIMPLE_LABEL
)
3768 elt
= pointer_map_contains (lab_rtx_for_bb
, bb
);
3772 last
= get_last_insn ();
3776 expand_gimple_stmt (stmt
);
3781 emit_label ((rtx
) *elt
);
3783 /* Java emits line number notes in the top of labels.
3784 ??? Make this go away once line number notes are obsoleted. */
3785 BB_HEAD (bb
) = NEXT_INSN (last
);
3786 if (NOTE_P (BB_HEAD (bb
)))
3787 BB_HEAD (bb
) = NEXT_INSN (BB_HEAD (bb
));
3788 note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, BB_HEAD (bb
));
3790 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
3793 note
= BB_HEAD (bb
) = emit_note (NOTE_INSN_BASIC_BLOCK
);
3795 NOTE_BASIC_BLOCK (note
) = bb
;
3797 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3801 stmt
= gsi_stmt (gsi
);
3803 /* If this statement is a non-debug one, and we generate debug
3804 insns, then this one might be the last real use of a TERed
3805 SSA_NAME, but where there are still some debug uses further
3806 down. Expanding the current SSA name in such further debug
3807 uses by their RHS might lead to wrong debug info, as coalescing
3808 might make the operands of such RHS be placed into the same
3809 pseudo as something else. Like so:
3810 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3814 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3815 If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
3816 the write to a_2 would actually have clobbered the place which
3819 So, instead of that, we recognize the situation, and generate
3820 debug temporaries at the last real use of TERed SSA names:
3827 if (MAY_HAVE_DEBUG_INSNS
3829 && !is_gimple_debug (stmt
))
3835 location_t sloc
= get_curr_insn_source_location ();
3836 tree sblock
= get_curr_insn_block ();
3838 /* Look for SSA names that have their last use here (TERed
3839 names always have only one real use). */
3840 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
3841 if ((def
= get_gimple_for_ssa_name (op
)))
3843 imm_use_iterator imm_iter
;
3844 use_operand_p use_p
;
3845 bool have_debug_uses
= false;
3847 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, op
)
3849 if (gimple_debug_bind_p (USE_STMT (use_p
)))
3851 have_debug_uses
= true;
3856 if (have_debug_uses
)
3858 /* OP is a TERed SSA name, with DEF it's defining
3859 statement, and where OP is used in further debug
3860 instructions. Generate a debug temporary, and
3861 replace all uses of OP in debug insns with that
3864 tree value
= gimple_assign_rhs_to_tree (def
);
3865 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
3867 enum machine_mode mode
;
3869 set_curr_insn_source_location (gimple_location (def
));
3870 set_curr_insn_block (gimple_block (def
));
3872 DECL_ARTIFICIAL (vexpr
) = 1;
3873 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
3875 mode
= DECL_MODE (value
);
3877 mode
= TYPE_MODE (TREE_TYPE (value
));
3878 DECL_MODE (vexpr
) = mode
;
3880 val
= gen_rtx_VAR_LOCATION
3881 (mode
, vexpr
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
3883 emit_debug_insn (val
);
3885 FOR_EACH_IMM_USE_STMT (debugstmt
, imm_iter
, op
)
3887 if (!gimple_debug_bind_p (debugstmt
))
3890 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
3891 SET_USE (use_p
, vexpr
);
3893 update_stmt (debugstmt
);
3897 set_curr_insn_source_location (sloc
);
3898 set_curr_insn_block (sblock
);
3901 currently_expanding_gimple_stmt
= stmt
;
3903 /* Expand this statement, then evaluate the resulting RTL and
3904 fixup the CFG accordingly. */
3905 if (gimple_code (stmt
) == GIMPLE_COND
)
3907 new_bb
= expand_gimple_cond (bb
, stmt
);
3911 else if (gimple_debug_bind_p (stmt
))
3913 location_t sloc
= get_curr_insn_source_location ();
3914 tree sblock
= get_curr_insn_block ();
3915 gimple_stmt_iterator nsi
= gsi
;
3919 tree var
= gimple_debug_bind_get_var (stmt
);
3922 enum machine_mode mode
;
3924 if (TREE_CODE (var
) != DEBUG_EXPR_DECL
3925 && TREE_CODE (var
) != LABEL_DECL
3926 && !target_for_debug_bind (var
))
3927 goto delink_debug_stmt
;
3929 if (gimple_debug_bind_has_value_p (stmt
))
3930 value
= gimple_debug_bind_get_value (stmt
);
3934 last
= get_last_insn ();
3936 set_curr_insn_source_location (gimple_location (stmt
));
3937 set_curr_insn_block (gimple_block (stmt
));
3940 mode
= DECL_MODE (var
);
3942 mode
= TYPE_MODE (TREE_TYPE (var
));
3944 val
= gen_rtx_VAR_LOCATION
3945 (mode
, var
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
3947 emit_debug_insn (val
);
3949 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3951 /* We can't dump the insn with a TREE where an RTX
3953 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
3954 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
3955 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
3959 /* In order not to generate too many debug temporaries,
3960 we delink all uses of debug statements we already expanded.
3961 Therefore debug statements between definition and real
3962 use of TERed SSA names will continue to use the SSA name,
3963 and not be replaced with debug temps. */
3964 delink_stmt_imm_use (stmt
);
3968 if (gsi_end_p (nsi
))
3970 stmt
= gsi_stmt (nsi
);
3971 if (!gimple_debug_bind_p (stmt
))
3975 set_curr_insn_source_location (sloc
);
3976 set_curr_insn_block (sblock
);
3978 else if (gimple_debug_source_bind_p (stmt
))
3980 location_t sloc
= get_curr_insn_source_location ();
3981 tree sblock
= get_curr_insn_block ();
3982 tree var
= gimple_debug_source_bind_get_var (stmt
);
3983 tree value
= gimple_debug_source_bind_get_value (stmt
);
3985 enum machine_mode mode
;
3987 last
= get_last_insn ();
3989 set_curr_insn_source_location (gimple_location (stmt
));
3990 set_curr_insn_block (gimple_block (stmt
));
3992 mode
= DECL_MODE (var
);
3994 val
= gen_rtx_VAR_LOCATION (mode
, var
, (rtx
)value
,
3995 VAR_INIT_STATUS_UNINITIALIZED
);
3997 emit_debug_insn (val
);
3999 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4001 /* We can't dump the insn with a TREE where an RTX
4003 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
4004 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
4005 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
4008 set_curr_insn_source_location (sloc
);
4009 set_curr_insn_block (sblock
);
4013 if (is_gimple_call (stmt
) && gimple_call_tail_p (stmt
))
4016 new_bb
= expand_gimple_tailcall (bb
, stmt
, &can_fallthru
);
4027 def_operand_p def_p
;
4028 def_p
= SINGLE_SSA_DEF_OPERAND (stmt
, SSA_OP_DEF
);
4032 /* Ignore this stmt if it is in the list of
4033 replaceable expressions. */
4035 && bitmap_bit_p (SA
.values
,
4036 SSA_NAME_VERSION (DEF_FROM_PTR (def_p
))))
4039 last
= expand_gimple_stmt (stmt
);
4040 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
4045 currently_expanding_gimple_stmt
= NULL
;
4047 /* Expand implicit goto and convert goto_locus. */
4048 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
4050 if (e
->goto_locus
&& e
->goto_block
)
4052 set_curr_insn_source_location (e
->goto_locus
);
4053 set_curr_insn_block (e
->goto_block
);
4054 e
->goto_locus
= curr_insn_locator ();
4056 e
->goto_block
= NULL
;
4057 if ((e
->flags
& EDGE_FALLTHRU
) && e
->dest
!= bb
->next_bb
)
4059 emit_jump (label_rtx_for_bb (e
->dest
));
4060 e
->flags
&= ~EDGE_FALLTHRU
;
4064 /* Expanded RTL can create a jump in the last instruction of block.
4065 This later might be assumed to be a jump to successor and break edge insertion.
4066 We need to insert dummy move to prevent this. PR41440. */
4067 if (single_succ_p (bb
)
4068 && (single_succ_edge (bb
)->flags
& EDGE_FALLTHRU
)
4069 && (last
= get_last_insn ())
4072 rtx dummy
= gen_reg_rtx (SImode
);
4073 emit_insn_after_noloc (gen_move_insn (dummy
, dummy
), last
, NULL
);
4076 do_pending_stack_adjust ();
4078 /* Find the block tail. The last insn in the block is the insn
4079 before a barrier and/or table jump insn. */
4080 last
= get_last_insn ();
4081 if (BARRIER_P (last
))
4082 last
= PREV_INSN (last
);
4083 if (JUMP_TABLE_DATA_P (last
))
4084 last
= PREV_INSN (PREV_INSN (last
));
4087 update_bb_for_insn (bb
);
/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When entry edge points to first basic block, we don't need jump,
     otherwise we have to jump into proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (current_loops && ENTRY_BLOCK_PTR->loop_father)
    add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
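
/* For example, for

     void f (void)        <- block passed in:    level 0
     { int x;             <- its subblock:       level 1
       { int y; ... }     <- nested subblock:    level 2
     }

   the recursion above assigns BLOCK_NUMBER 0, 1 and 2 respectively,
   following BLOCK_SUBBLOCKS for depth and BLOCK_CHAIN for siblings.  */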
/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR);

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  set_curr_insn_block (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();

  /* While emitting the function end we could move end of the last basic
     block.  */
  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
				   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;
  if (current_loops && EXIT_BLOCK_PTR->loop_father)
    add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
	e->count -= e2->count;
	exit_block->count -= e2->count;
	exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t, 2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || CONVERT_EXPR_P (t))
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t)
	      && DECL_MODE (t) != BLKmode)
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple stmt = gsi_stmt (gsi);
	if (!is_gimple_debug (stmt))
	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}
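
/* For example, in

     int v[2];
     ...
     return v[i];      (i not a compile-time constant)

   v may otherwise be small enough to live in a single register, but the
   variable index cannot be expanded out of a register, so the walk above
   sets TREE_ADDRESSABLE (v) and v gets a stack slot; constant indexes
   such as v[0] would not force this.  */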
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}
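/* Illustrative sketch (hypothetical user code, not part of this pass):
   dynamic stack realignment is typically the result of a frame such as

     extern void consume (char *);

     void
     g (void)
     {
       char buf[64] __attribute__ ((aligned (32)));
       consume (buf);
     }

   built for a target whose incoming stack boundary is smaller than the
   32-byte alignment requested here.  crtl->stack_alignment_estimated
   then exceeds the incoming boundary, stack_realign_needed becomes
   true, and when the realigned frame still has to reach its incoming
   argument area the target's get_drap_rtx hook returns a DRAP
   register, which the code above installs as
   crtl->args.internal_arg_pointer.  */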
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  rtx var_seq;
  unsigned i;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = (rtx *) xcalloc (SA.map->num_partitions,
                                            sizeof (rtx));

  /* Make sure all values used by the optimization passes have sane
     defaults.  */
  reg_renumber = 0;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;
  /* Dominators are not kept up-to-date as we may create new basic-blocks.  */
  free_dominance_info (CDI_DOMINATORS);

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
        set_curr_insn_source_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_source_location (cfun->function_start_locus);
    }
  else
    set_curr_insn_source_location (UNKNOWN_LOCATION);
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict
     distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting local variables: "
                 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting function: "
                 "all local arrays are less than %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }
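  /* For illustration (hypothetical examples, not from this file): under
     -fstack-protector -Wstack-protector, a function such as

       extern void sink (char *);

       void
       vla_user (int n)
       {
         char *p = __builtin_alloca (n);
         sink (p);
       }

     gets the first warning, because a variable-length buffer cannot be
     covered by the guard, while a function whose largest local char
     array is shorter than --param ssp-buffer-size receives no guard at
     all and so gets the second warning.  */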
  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
         before parm_birth_insn.  We've just inserted an alloca call.
         Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
          && !SA.partition_to_pseudo[i])
        SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here making this available, but that would mean
         to select one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
        {
          if (MEM_P (SA.partition_to_pseudo[i]))
            set_mem_expr (SA.partition_to_pseudo[i], NULL);
        }
    }
  /* If we have a class containing differently aligned pointers
     we need to merge those into the corresponding RTL pointer
     alignment.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      int part;
      rtx r;

      if (!name
          || !POINTER_TYPE_P (TREE_TYPE (name))
          /* We might have generated new SSA names in
             update_alias_info_with_stack_vars.  They will have a NULL
             defining statement, and won't be part of the partitioning,
             so ignore those.  */
          || !SSA_NAME_DEF_STMT (name))
        continue;

      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
        continue;

      r = SA.partition_to_pseudo[part];
      if (REG_P (r))
        mark_reg_pointer (r, get_pointer_alignment (name));
    }
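  /* Sketch of the intended effect (hypothetical GIMPLE, not from this
     file): if out-of-SSA coalescing placed two pointer SSA names with
     different known alignments into one partition, say

       p_1 = __builtin_assume_aligned (q_2, 16);
       p_3 = r_4 + 1;

     with p_1 and p_3 sharing a pseudo, the loop above calls
     mark_reg_pointer once per name, and the pseudo is expected to keep
     the smaller of the recorded alignments, since mark_reg_pointer
     never raises an already-recorded REGNO_POINTER_ALIGN.  */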
  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations ();

  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;
  if (current_loops)
    loops_state_clear (LOOP_CLOSED_SSA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            {
              rebuild_jump_labels_chain (e->insns.r);
              /* Avoid putting insns before parm_birth_insn.  */
              if (e->src == ENTRY_BLOCK_PTR
                  && single_succ_p (ENTRY_BLOCK_PTR)
                  && parm_birth_insn)
                {
                  rtx insns = e->insns.r;
                  e->insns.r = NULL_RTX;
                  emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
                }
              else
                commit_one_edge_insertion (e);
            }
          else
            ei_next (&ei);
        }
    }
  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the
             backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (cfun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ???  We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function, before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (cfun->gimple_df->tm_restart)
    {
      htab_delete (cfun->gimple_df->tm_restart);
      cfun->gimple_df->tm_restart = NULL;
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();

  timevar_pop (TV_POST_EXPAND);

  return 0;
}
struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,                  /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_ssa | PROP_trees,                /* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,                /* todo_flags_start */
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};