/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "ssaexpand.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

gimple_assign_rhs_to_tree (gimple stmt)
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && gimple_block (stmt) != TREE_BLOCK (t)))

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
set_rtl (tree t, rtx x)
  if (TREE_CODE (t) == SSA_NAME)
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */

  /* The numbers of conflicting stack variables.  */

#define EOC  ((size_t)-1)
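
/* Illustrative note (added; not part of the original source): a partition is
   a singly linked list threaded through the NEXT fields and terminated by
   EOC.  For instance, after variables 4 and 7 have been merged into the
   partition represented by variable 2, all three entries have
   representative == 2 and the chain starting at 2 visits 2, 4 and 7 before
   ending with next == EOC.  */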
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
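
/* For example (illustrative, not from the original source): if
   STARTING_FRAME_OFFSET were 8 and PREFERRED_STACK_BOUNDARY were 128 bits
   (16 bytes), expand_used_vars below would compute off == 8 and
   frame_phase == 8, keeping frame_offset + frame_phase a multiple of 16
   for every slot handed out by alloc_stack_frame_space.  */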
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Update stack alignment requirement.  */

update_stack_alignment (unsigned int align)
  if (SUPPORTS_STACK_ALIGNMENT)
      if (crtl->stack_alignment_estimated < align)
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

get_decl_align_unit (tree decl)
  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  update_stack_alignment (align);

  return align / BITS_PER_UNIT;
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;

      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;

  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;
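
/* Worked example (added for illustration; assumes FRAME_GROWS_DOWNWARD and
   frame_phase == 0): starting from frame_offset == -20, a request for
   size == 12 at align == 8 computes new_frame_offset = -20 - 12 = -32,
   which is already 8-byte aligned, so the slot is placed at offset -32 and
   frame_offset becomes -32.  */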
/* Accumulate DECL into STACK_VARS.  */

add_stack_var (tree decl)
  if (stack_vars_num >= stack_vars_alloc)
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);

  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size
    = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);
/* Make the decls associated with luid's X and Y conflict.  */

add_stack_var_conflict (size_t x, size_t y)
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  a->conflicts = BITMAP_ALLOC (NULL);
  b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
/* Check whether the decls associated with luid's X and Y conflict.  */

stack_var_conflict_p (size_t x, size_t y)
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)

  return bitmap_bit_p (a->conflicts, y);
/* Returns true if TYPE is or contains a union type.  */

aggregate_contains_union_type (tree type)
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, we still need to add a conflict for these
   variables in the interference graph.  We also need to make sure to add
   conflicts for structures containing unions.  Otherwise RTL alias analysis
   comes along and, due to type-based aliasing rules, decides that for two
   overlapping union temporaries { short s; int i; } accesses to the same mem
   through different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

add_alias_set_conflicts (void)
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);

              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
            add_stack_var_conflict (i, j);
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

stack_var_size_cmp (const void *a, const void *b)
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  unsigned int uida, uidb;

  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size, use the ID of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
  else if (TREE_CODE (declb) == SSA_NAME)

  uida = DECL_UID (decla), uidb = DECL_UID (declb);
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
        || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

update_alias_info_with_stack_vars (void)
  struct pointer_map_t *decls_to_partitions = NULL;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)

      if (!decls_to_partitions)
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (DECL_UID (decl)));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
  struct stack_var *vb = &stack_vars[b];

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
    add_stack_var_conflict (a, stack_vars[u].representative);
  BITMAP_FREE (vb->conflicts);
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        Look for the largest non-conflicting object B with size <= S.

partition_stack_vars (void)
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)

          /* Ignore objects too large for the remaining space.  */

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
              HOST_WIDE_INT toff = offset;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)

              isize -= toff - offset;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

  update_alias_info_with_stack_vars ();
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

dump_stack_var_partition (void)
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
/* Assign rtl to DECL at frame offset OFFSET.  */

expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align, max_align;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      max_align = crtl->max_used_stack_slot_alignment;
      if (align == 0 || align > max_align)

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;

  set_mem_attributes (x, SSAVAR (decl), true);
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

expand_stack_vars (bool (*pred) (tree))
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
/* Take into account all sizes of partitions and reset DECL_RTLs.  */

account_stack_vars (void)
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

expand_one_stack_var (tree var)
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

expand_one_hard_reg_var (tree var)
  rest_of_decl_compilation (var, 0, 0);

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

expand_one_register_var (tree var)
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

expand_one_error_var (tree var)
  enum machine_mode mode = DECL_MODE (var);

    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)

    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

defer_stack_allocation (tree var, bool toplevel)
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
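  /* For example (illustrative, added): at -O0, with no stack protector
     enabled, an 'int' scalar or a 16-byte struct falls under the 32-byte
     cutoff checked below and is given its stack slot immediately, while a
     64-byte character buffer is still added to STACK_VARS for coalescing.  */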
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */

expand_one_var (tree var, bool toplevel, bool really_expand)
  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
      /* Because we don't know if VAR will be in a register or on the stack,
         we conservatively assume it will be on the stack even if VAR is
         eventually put into a register after the RA pass.  For non-automatic
         variables, which won't be on the stack, we collect alignment of
         the type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
          /* stack_alignment_estimated shouldn't change after the stack
             realign decision has been made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;

  if (TREE_CODE (origvar) == SSA_NAME)
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)

  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
  else if (DECL_EXTERNAL (var))
  else if (DECL_HAS_VALUE_EXPR_P (var))
  else if (TREE_STATIC (var))
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
  else if (TREE_TYPE (var) == error_mark_node)
      expand_one_error_var (var);
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
      expand_one_hard_reg_var (var);
  else if (use_register_for_decl (var))
      expand_one_register_var (origvar);
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
      error ("size of variable %q+D is too large", var);
      expand_one_error_var (var);
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);

      expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

expand_used_vars_for_block (tree block, bool toplevel)
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

clear_tree_used (tree block)
  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */

  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY  1
#define SPCT_HAS_SMALL_CHAR_ARRAY  2
#define SPCT_HAS_ARRAY             4
#define SPCT_HAS_AGGREGATE         8

stack_protect_classify_type (tree type)
  unsigned int ret = 0;

  switch (TREE_CODE (type))
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))

          len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;

          ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;

      ret = SPCT_HAS_ARRAY;

    case QUAL_UNION_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

stack_protect_decl_phase (tree decl)
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
      else if (bits & SPCT_HAS_ARRAY)

    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

    has_protected_decls = true;

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

stack_protect_decl_phase_1 (tree decl)
  return stack_protect_decl_phase (decl) == 1;

stack_protect_decl_phase_2 (tree decl)
  return stack_protect_decl_phase (decl) == 2;
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

add_stack_protection_conflicts (void)
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
/* Create a decl for the guard at the top of the stack frame.  */

create_stack_guard (void)
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
  HOST_WIDE_INT size = 0;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    size += expand_one_var (t, toplevel, false);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);
/* Prepare for expanding variables.  */

init_vars_expansion (void)
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;

/* Free up stack variable graph data.  */

fini_vars_expansion (void)
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);

  stack_vars_alloc = stack_vars_num = 0;
/* Make a fair guess for the size of the stack frame of the decl
   passed.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

estimated_stack_frame_size (tree decl)
  HOST_WIDE_INT size = 0;
  tree var, outer_block = DECL_INITIAL (current_function_decl);

  tree old_cur_fun_decl = current_function_decl;
  current_function_decl = decl;
  push_cfun (DECL_STRUCT_FUNCTION (decl));

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (cfun, ix, var)
      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;

  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();

  current_function_decl = old_cur_fun_decl;
/* Expand all variables used in the function.  */

expand_used_vars (void)
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;

  /* Compute the phase of the stack frame for this function.  */
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      int off = STARTING_FRAME_OFFSET % align;
      frame_phase = off ? align - off : 0;

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);

          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
          TREE_USED (var) = 0;

      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

        expand_one_var (var, true, true);

      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);
  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();

        dump_stack_var_partition ();

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);

      expand_stack_vars (NULL);
      fini_vars_expansion ();

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
  if (dump_file && (dump_flags & TDF_DETAILS))
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
  gimple_stmt_iterator gsi;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))

      return label_rtx (lab);

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

maybe_cleanup_end_of_block (edge e, rtx last)
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
      /* Now we have a single successor block; if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

expand_gimple_cond (basic_block bb, gimple stmt)
  basic_block new_bb, dest;
  enum tree_code code;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:

     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as a special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);

  last2 = last = get_last_insn ();
  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);

  if (true_edge->dest == bb->next_bb)
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
  true_edge->goto_block = NULL;
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

expand_call_stmt (gimple stmt)
  tree lhs = gimple_call_lhs (stmt);

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
      tree arg = gimple_call_arg (stmt, i);

      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

    expand_assignment (lhs, exp, false);

    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is, no tailcalls and no GIMPLE_COND.  */

expand_gimple_stmt_1 (gimple stmt)
  switch (gimple_code (stmt))
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)

        expand_computed_goto (op0);

      expand_label (gimple_label_label (stmt));

    case GIMPLE_PREDICT:

      expand_asm_stmt (stmt);

      expand_call_stmt (stmt);

      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)
          tree result = DECL_RESULT (current_function_decl);

          /* If we are not returning the current function's RESULT_DECL,
             build an assignment to it.  */
              /* I believe that a function's RESULT_DECL is unique.  */
              gcc_assert (TREE_CODE (op0) != RESULT_DECL);

              /* ??? We'd like to use simply expand_assignment here,
                 but this fails if the value is of BLKmode but the return
                 decl is a register.  expand_return has special handling
                 for this combination, which eventually should move
                 to common code.  See comments there.  Until then, let's
                 build a modify expression :-/  */
              op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),

        expand_null_return ();

        expand_return (op0);
        tree lhs = gimple_assign_lhs (stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple, the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
            tree rhs = gimple_assign_rhs1 (stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            expand_assignment (lhs, rhs,
                               gimple_assign_nontemporal_move_p (stmt));
            bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))

            ops.code = gimple_assign_rhs_code (stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
              case GIMPLE_TERNARY_RHS:
                ops.op2 = gimple_assign_rhs3 (stmt);
              case GIMPLE_BINARY_RHS:
                ops.op1 = gimple_assign_rhs2 (stmt);
              case GIMPLE_UNARY_RHS:
                ops.op0 = gimple_assign_rhs1 (stmt);

            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value into a
               register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),

                int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),

                temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                      GET_MODE (target), temp, unsignedp);

                convert_move (SUBREG_REG (target), temp, unsignedp);

            else if (nontemporal && emit_storent_insn (target, temp))

                temp = force_operand (temp, target);

                emit_move_insn (target, temp);
/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

expand_gimple_stmt (gimple stmt)
  location_t saved_location = input_location;

  last = get_last_insn ();

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (gimple_has_location (stmt))
      input_location = gimple_location (stmt);
      set_curr_insn_source_location (input_location);

      /* Record where the insns produced belong.  */
      set_curr_insn_block (gimple_block (stmt));

  expand_gimple_stmt_1 (stmt);
  /* Free any temporaries used to evaluate this statement.  */

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);

  for (insn = next_real_insn (last); insn;
       insn = next_real_insn (insn))
      if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* If we want exceptions for non-call insns, any
             may_trap_p instruction may throw.  */
          && GET_CODE (PATTERN (insn)) != CLOBBER
          && GET_CODE (PATTERN (insn)) != USE
          && insn_could_throw_p (insn))
        make_reg_eh_region_note (insn, 0, lp_nr);
2028 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2029 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2030 generated a tail call (something that might be denied by the ABI
2031 rules governing the call; see calls.c).
2033 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2034 can still reach the rest of BB. The case here is __builtin_sqrt,
2035 where the NaN result goes through the external function (with a
2036 tailcall) and the normal result happens via a sqrt instruction. */
2039 expand_gimple_tailcall (basic_block bb
, gimple stmt
, bool *can_fallthru
)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = expand_gimple_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	{
	  if (e->dest != EXIT_BLOCK_PTR)
	    {
	      e->dest->count -= e->count;
	      e->dest->frequency -= EDGE_FREQUENCY (e);
	      if (e->dest->count < 0)
		e->dest->count = 0;
	      if (e->dest->frequency < 0)
		e->dest->frequency = 0;
	    }
	  count += e->count;
	  probability += e->probability;
	  remove_edge (e);
	}
      else
	ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance an sqrt builtin expander expands if with
	 sibcall in the then and label for `else`.  */
      if (LABEL_P (NEXT_INSN (last)))
	{
	  *can_fallthru = true;
	  break;
	}
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
	BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  return bb;
}
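/* As a sketch of the conditional-tailcall case handled above: for
     double f (double x) { return __builtin_sqrt (x); }
   a target may emit an inline sqrt instruction plus a conditional
   sibcall to the library sqrt for the NaN/error path, so the block
   ends in a sibcall yet can still fall through; hence *CAN_FALLTHRU.  */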
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}
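/* A worked example of the adjustment above (illustrative numbers):
   for op0 = -7, op1 = 2 the truncated quotient is -3 with mod = -1.
   mod != 0 and op1 / mod = -2 < 0, so the adjustment is -1 and
   -3 + -1 = -4 == floor (-7 / 2).  When mod is zero or has the same
   sign as op1, the adjustment is 0.  */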
/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}
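/* For instance (illustrative): op0 = 7, op1 = 2 gives a truncated
   quotient of 3 with mod = 1; op1 / mod = 2 > 0, so the adjustment
   is 1 and 3 + 1 = 4 == ceil (7 / 2).  For op0 = -7 the remainder
   and OP1 differ in sign, so no adjustment is applied.  */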
/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}
/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
	? (op1 / mod > 0 ? 1 : -1)
	: 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
		       gen_rtx_MINUS (mode,
				      gen_rtx_ABS (mode, op1),
				      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}
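/* A quick check of the rounding rule (illustrative): op0 = 6, op1 = 4
   truncates to 1 with mod = 2; abs (mod) = 2 >= abs (op1) - abs (mod) = 2
   and op1 / mod = 2 > 0, so the adjustment is +1 and the result is 2,
   i.e. 1.5 rounded away from zero.  For op0 = 5 the remainder 1 is less
   than 4 - 1, so no adjustment is made.  */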
/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
		       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
   any rtl.  */

static rtx
convert_debug_memory_address (enum machine_mode mode, rtx x)
{
  enum machine_mode xmode = GET_MODE (x);

#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (mode == Pmode);
  gcc_assert (xmode == mode || xmode == VOIDmode);
#else
  gcc_assert (mode == Pmode || mode == ptr_mode);

  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
    return x;

  if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
    x = simplify_gen_subreg (mode, x, xmode,
			     subreg_lowpart_offset (mode, xmode));
  else if (POINTERS_EXTEND_UNSIGNED > 0)
    x = gen_rtx_ZERO_EXTEND (mode, x);
  else if (!POINTERS_EXTEND_UNSIGNED)
    x = gen_rtx_SIGN_EXTEND (mode, x);
  else
    gcc_unreachable ();
#endif /* POINTERS_EXTEND_UNSIGNED */

  return x;
}
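/* For example, on a target where ptr_mode is SImode but Pmode is DImode
   (an ILP32 ABI on a 64-bit machine), a debug address computed in SImode
   is widened here with ZERO_EXTEND or SIGN_EXTEND according to
   POINTERS_EXTEND_UNSIGNED, rather than by emitting conversion insns,
   since debug expressions must not generate code.  */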
2244 /* Return an RTX equivalent to the value of the tree expression
2248 expand_debug_expr (tree exp
)
2250 rtx op0
= NULL_RTX
, op1
= NULL_RTX
, op2
= NULL_RTX
;
2251 enum machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
2252 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
2255 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
2257 case tcc_expression
:
2258 switch (TREE_CODE (exp
))
2262 case WIDEN_MULT_PLUS_EXPR
:
2263 case WIDEN_MULT_MINUS_EXPR
:
2266 case TRUTH_ANDIF_EXPR
:
2267 case TRUTH_ORIF_EXPR
:
2268 case TRUTH_AND_EXPR
:
2270 case TRUTH_XOR_EXPR
:
2273 case TRUTH_NOT_EXPR
:
2282 op2
= expand_debug_expr (TREE_OPERAND (exp
, 2));
2289 case tcc_comparison
:
2290 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
2297 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
2307 case tcc_exceptional
:
2308 case tcc_declaration
:
2314 switch (TREE_CODE (exp
))
2317 if (!lookup_constant_def (exp
))
2319 if (strlen (TREE_STRING_POINTER (exp
)) + 1
2320 != (size_t) TREE_STRING_LENGTH (exp
))
2322 op0
= gen_rtx_CONST_STRING (Pmode
, TREE_STRING_POINTER (exp
));
2323 op0
= gen_rtx_MEM (BLKmode
, op0
);
2324 set_mem_attributes (op0
, exp
, 0);
2327 /* Fall through... */
2332 op0
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_INITIALIZER
);
2336 gcc_assert (COMPLEX_MODE_P (mode
));
2337 op0
= expand_debug_expr (TREE_REALPART (exp
));
2338 op1
= expand_debug_expr (TREE_IMAGPART (exp
));
2339 return gen_rtx_CONCAT (mode
, op0
, op1
);
2341 case DEBUG_EXPR_DECL
:
2342 op0
= DECL_RTL_IF_SET (exp
);
2347 op0
= gen_rtx_DEBUG_EXPR (mode
);
2348 DEBUG_EXPR_TREE_DECL (op0
) = exp
;
2349 SET_DECL_RTL (exp
, op0
);
2359 op0
= DECL_RTL_IF_SET (exp
);
2361 /* This decl was probably optimized away. */
2364 if (TREE_CODE (exp
) != VAR_DECL
2365 || DECL_EXTERNAL (exp
)
2366 || !TREE_STATIC (exp
)
2368 || DECL_HARD_REGISTER (exp
)
2369 || mode
== VOIDmode
)
2372 op0
= make_decl_rtl_for_debug (exp
);
2374 || GET_CODE (XEXP (op0
, 0)) != SYMBOL_REF
2375 || SYMBOL_REF_DECL (XEXP (op0
, 0)) != exp
)
2379 op0
= copy_rtx (op0
);
2381 if (GET_MODE (op0
) == BLKmode
2382 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2383 below would ICE. While it is likely a FE bug,
2384 try to be robust here. See PR43166. */
2386 || (mode
== VOIDmode
&& GET_MODE (op0
) != VOIDmode
))
2388 gcc_assert (MEM_P (op0
));
2389 op0
= adjust_address_nv (op0
, mode
, 0);
2400 enum machine_mode inner_mode
= GET_MODE (op0
);
2402 if (mode
== inner_mode
)
2405 if (inner_mode
== VOIDmode
)
2407 if (TREE_CODE (exp
) == SSA_NAME
)
2408 inner_mode
= TYPE_MODE (TREE_TYPE (exp
));
2410 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
2411 if (mode
== inner_mode
)
2415 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
2417 if (GET_MODE_BITSIZE (mode
) == GET_MODE_BITSIZE (inner_mode
))
2418 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
2419 else if (GET_MODE_BITSIZE (mode
) < GET_MODE_BITSIZE (inner_mode
))
2420 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
2422 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
2424 else if (FLOAT_MODE_P (mode
))
2426 gcc_assert (TREE_CODE (exp
) != SSA_NAME
);
2427 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
2428 op0
= simplify_gen_unary (UNSIGNED_FLOAT
, mode
, op0
, inner_mode
);
2430 op0
= simplify_gen_unary (FLOAT
, mode
, op0
, inner_mode
);
2432 else if (FLOAT_MODE_P (inner_mode
))
2435 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
2437 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
2439 else if (CONSTANT_P (op0
)
2440 || GET_MODE_BITSIZE (mode
) <= GET_MODE_BITSIZE (inner_mode
))
2441 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
,
2442 subreg_lowpart_offset (mode
,
2444 else if (TREE_CODE_CLASS (TREE_CODE (exp
)) == tcc_unary
2445 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)))
2447 op0
= gen_rtx_ZERO_EXTEND (mode
, op0
);
2449 op0
= gen_rtx_SIGN_EXTEND (mode
, op0
);
2456 if (!integer_zerop (TREE_OPERAND (exp
, 1)))
2460 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
2464 if (POINTER_TYPE_P (TREE_TYPE (exp
)))
2465 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
2467 as
= ADDR_SPACE_GENERIC
;
2469 op0
= gen_rtx_MEM (mode
, op0
);
2471 set_mem_attributes (op0
, exp
, 0);
2472 set_mem_addr_space (op0
, as
);
2476 case TARGET_MEM_REF
:
2477 if (TREE_CODE (TMR_BASE (exp
)) == ADDR_EXPR
2478 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp
), 0)))
2481 op0
= expand_debug_expr
2482 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp
)), exp
));
2486 as
= TYPE_ADDR_SPACE (TREE_TYPE (exp
));
2488 op0
= gen_rtx_MEM (mode
, op0
);
2490 set_mem_attributes (op0
, exp
, 0);
2491 set_mem_addr_space (op0
, as
);
2496 case ARRAY_RANGE_REF
:
2501 case VIEW_CONVERT_EXPR
:
2503 enum machine_mode mode1
;
2504 HOST_WIDE_INT bitsize
, bitpos
;
2507 tree tem
= get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
,
2508 &mode1
, &unsignedp
, &volatilep
, false);
2514 orig_op0
= op0
= expand_debug_expr (tem
);
2521 enum machine_mode addrmode
, offmode
;
2526 op0
= XEXP (op0
, 0);
2527 addrmode
= GET_MODE (op0
);
2528 if (addrmode
== VOIDmode
)
2531 op1
= expand_debug_expr (offset
);
2535 offmode
= GET_MODE (op1
);
2536 if (offmode
== VOIDmode
)
2537 offmode
= TYPE_MODE (TREE_TYPE (offset
));
2539 if (addrmode
!= offmode
)
2540 op1
= simplify_gen_subreg (addrmode
, op1
, offmode
,
2541 subreg_lowpart_offset (addrmode
,
2544 /* Don't use offset_address here, we don't need a
2545 recognizable address, and we don't want to generate
2547 op0
= gen_rtx_MEM (mode
, gen_rtx_PLUS (addrmode
, op0
, op1
));
2552 if (mode1
== VOIDmode
)
2554 mode1
= smallest_mode_for_size (bitsize
, MODE_INT
);
2555 if (bitpos
>= BITS_PER_UNIT
)
2557 op0
= adjust_address_nv (op0
, mode1
, bitpos
/ BITS_PER_UNIT
);
2558 bitpos
%= BITS_PER_UNIT
;
2560 else if (bitpos
< 0)
2563 = (-bitpos
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
;
2564 op0
= adjust_address_nv (op0
, mode1
, units
);
2565 bitpos
+= units
* BITS_PER_UNIT
;
2567 else if (bitpos
== 0 && bitsize
== GET_MODE_BITSIZE (mode
))
2568 op0
= adjust_address_nv (op0
, mode
, 0);
2569 else if (GET_MODE (op0
) != mode1
)
2570 op0
= adjust_address_nv (op0
, mode1
, 0);
2572 op0
= copy_rtx (op0
);
2573 if (op0
== orig_op0
)
2574 op0
= shallow_copy_rtx (op0
);
2575 set_mem_attributes (op0
, exp
, 0);
2578 if (bitpos
== 0 && mode
== GET_MODE (op0
))
2584 if (GET_MODE (op0
) == BLKmode
)
2587 if ((bitpos
% BITS_PER_UNIT
) == 0
2588 && bitsize
== GET_MODE_BITSIZE (mode1
))
2590 enum machine_mode opmode
= GET_MODE (op0
);
2592 if (opmode
== VOIDmode
)
2595 /* This condition may hold if we're expanding the address
2596 right past the end of an array that turned out not to
2597 be addressable (i.e., the address was only computed in
2598 debug stmts). The gen_subreg below would rightfully
2599 crash, and the address doesn't really exist, so just
2601 if (bitpos
>= GET_MODE_BITSIZE (opmode
))
2604 if ((bitpos
% GET_MODE_BITSIZE (mode
)) == 0)
2605 return simplify_gen_subreg (mode
, op0
, opmode
,
2606 bitpos
/ BITS_PER_UNIT
);
2609 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0
))
2610 && TYPE_UNSIGNED (TREE_TYPE (exp
))
2612 : ZERO_EXTRACT
, mode
,
2613 GET_MODE (op0
) != VOIDmode
2614 ? GET_MODE (op0
) : mode1
,
2615 op0
, GEN_INT (bitsize
), GEN_INT (bitpos
));
2619 return gen_rtx_ABS (mode
, op0
);
2622 return gen_rtx_NEG (mode
, op0
);
2625 return gen_rtx_NOT (mode
, op0
);
2629 return gen_rtx_UNSIGNED_FLOAT (mode
, op0
);
2631 return gen_rtx_FLOAT (mode
, op0
);
2633 case FIX_TRUNC_EXPR
:
2635 return gen_rtx_UNSIGNED_FIX (mode
, op0
);
2637 return gen_rtx_FIX (mode
, op0
);
2639 case POINTER_PLUS_EXPR
:
2640 /* For the rare target where pointers are not the same size as
2641 size_t, we need to check for mis-matched modes and correct
2644 && GET_MODE (op0
) != VOIDmode
&& GET_MODE (op1
) != VOIDmode
2645 && GET_MODE (op0
) != GET_MODE (op1
))
2647 if (GET_MODE_BITSIZE (GET_MODE (op0
)) < GET_MODE_BITSIZE (GET_MODE (op1
)))
2648 op1
= gen_rtx_TRUNCATE (GET_MODE (op0
), op1
);
2650 /* We always sign-extend, regardless of the signedness of
2651 the operand, because the operand is always unsigned
2652 here even if the original C expression is signed. */
2653 op1
= gen_rtx_SIGN_EXTEND (GET_MODE (op0
), op1
);
2657 return gen_rtx_PLUS (mode
, op0
, op1
);
2660 return gen_rtx_MINUS (mode
, op0
, op1
);
2663 return gen_rtx_MULT (mode
, op0
, op1
);
2666 case TRUNC_DIV_EXPR
:
2667 case EXACT_DIV_EXPR
:
2669 return gen_rtx_UDIV (mode
, op0
, op1
);
2671 return gen_rtx_DIV (mode
, op0
, op1
);
2673 case TRUNC_MOD_EXPR
:
2675 return gen_rtx_UMOD (mode
, op0
, op1
);
2677 return gen_rtx_MOD (mode
, op0
, op1
);
2679 case FLOOR_DIV_EXPR
:
2681 return gen_rtx_UDIV (mode
, op0
, op1
);
2684 rtx div
= gen_rtx_DIV (mode
, op0
, op1
);
2685 rtx mod
= gen_rtx_MOD (mode
, op0
, op1
);
2686 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
2687 return gen_rtx_PLUS (mode
, div
, adj
);
2690 case FLOOR_MOD_EXPR
:
2692 return gen_rtx_UMOD (mode
, op0
, op1
);
2695 rtx mod
= gen_rtx_MOD (mode
, op0
, op1
);
2696 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
2697 adj
= gen_rtx_NEG (mode
, gen_rtx_MULT (mode
, adj
, op1
));
2698 return gen_rtx_PLUS (mode
, mod
, adj
);
2704 rtx div
= gen_rtx_UDIV (mode
, op0
, op1
);
2705 rtx mod
= gen_rtx_UMOD (mode
, op0
, op1
);
2706 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
2707 return gen_rtx_PLUS (mode
, div
, adj
);
2711 rtx div
= gen_rtx_DIV (mode
, op0
, op1
);
2712 rtx mod
= gen_rtx_MOD (mode
, op0
, op1
);
2713 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
2714 return gen_rtx_PLUS (mode
, div
, adj
);
2720 rtx mod
= gen_rtx_UMOD (mode
, op0
, op1
);
2721 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
2722 adj
= gen_rtx_NEG (mode
, gen_rtx_MULT (mode
, adj
, op1
));
2723 return gen_rtx_PLUS (mode
, mod
, adj
);
2727 rtx mod
= gen_rtx_MOD (mode
, op0
, op1
);
2728 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
2729 adj
= gen_rtx_NEG (mode
, gen_rtx_MULT (mode
, adj
, op1
));
2730 return gen_rtx_PLUS (mode
, mod
, adj
);
2733 case ROUND_DIV_EXPR
:
2736 rtx div
= gen_rtx_UDIV (mode
, op0
, op1
);
2737 rtx mod
= gen_rtx_UMOD (mode
, op0
, op1
);
2738 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
2739 return gen_rtx_PLUS (mode
, div
, adj
);
2743 rtx div
= gen_rtx_DIV (mode
, op0
, op1
);
2744 rtx mod
= gen_rtx_MOD (mode
, op0
, op1
);
2745 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
2746 return gen_rtx_PLUS (mode
, div
, adj
);
2749 case ROUND_MOD_EXPR
:
2752 rtx mod
= gen_rtx_UMOD (mode
, op0
, op1
);
2753 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
2754 adj
= gen_rtx_NEG (mode
, gen_rtx_MULT (mode
, adj
, op1
));
2755 return gen_rtx_PLUS (mode
, mod
, adj
);
2759 rtx mod
= gen_rtx_MOD (mode
, op0
, op1
);
2760 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
2761 adj
= gen_rtx_NEG (mode
, gen_rtx_MULT (mode
, adj
, op1
));
2762 return gen_rtx_PLUS (mode
, mod
, adj
);
2766 return gen_rtx_ASHIFT (mode
, op0
, op1
);
2770 return gen_rtx_LSHIFTRT (mode
, op0
, op1
);
2772 return gen_rtx_ASHIFTRT (mode
, op0
, op1
);
2775 return gen_rtx_ROTATE (mode
, op0
, op1
);
2778 return gen_rtx_ROTATERT (mode
, op0
, op1
);
2782 return gen_rtx_UMIN (mode
, op0
, op1
);
2784 return gen_rtx_SMIN (mode
, op0
, op1
);
2788 return gen_rtx_UMAX (mode
, op0
, op1
);
2790 return gen_rtx_SMAX (mode
, op0
, op1
);
2793 case TRUTH_AND_EXPR
:
2794 return gen_rtx_AND (mode
, op0
, op1
);
2798 return gen_rtx_IOR (mode
, op0
, op1
);
2801 case TRUTH_XOR_EXPR
:
2802 return gen_rtx_XOR (mode
, op0
, op1
);
2804 case TRUTH_ANDIF_EXPR
:
2805 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, const0_rtx
);
2807 case TRUTH_ORIF_EXPR
:
2808 return gen_rtx_IF_THEN_ELSE (mode
, op0
, const_true_rtx
, op1
);
2810 case TRUTH_NOT_EXPR
:
2811 return gen_rtx_EQ (mode
, op0
, const0_rtx
);
2815 return gen_rtx_LTU (mode
, op0
, op1
);
2817 return gen_rtx_LT (mode
, op0
, op1
);
2821 return gen_rtx_LEU (mode
, op0
, op1
);
2823 return gen_rtx_LE (mode
, op0
, op1
);
2827 return gen_rtx_GTU (mode
, op0
, op1
);
2829 return gen_rtx_GT (mode
, op0
, op1
);
2833 return gen_rtx_GEU (mode
, op0
, op1
);
2835 return gen_rtx_GE (mode
, op0
, op1
);
2838 return gen_rtx_EQ (mode
, op0
, op1
);
2841 return gen_rtx_NE (mode
, op0
, op1
);
2843 case UNORDERED_EXPR
:
2844 return gen_rtx_UNORDERED (mode
, op0
, op1
);
2847 return gen_rtx_ORDERED (mode
, op0
, op1
);
2850 return gen_rtx_UNLT (mode
, op0
, op1
);
2853 return gen_rtx_UNLE (mode
, op0
, op1
);
2856 return gen_rtx_UNGT (mode
, op0
, op1
);
2859 return gen_rtx_UNGE (mode
, op0
, op1
);
2862 return gen_rtx_UNEQ (mode
, op0
, op1
);
2865 return gen_rtx_LTGT (mode
, op0
, op1
);
2868 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, op2
);
2871 gcc_assert (COMPLEX_MODE_P (mode
));
2872 if (GET_MODE (op0
) == VOIDmode
)
2873 op0
= gen_rtx_CONST (GET_MODE_INNER (mode
), op0
);
2874 if (GET_MODE (op1
) == VOIDmode
)
2875 op1
= gen_rtx_CONST (GET_MODE_INNER (mode
), op1
);
2876 return gen_rtx_CONCAT (mode
, op0
, op1
);
2879 if (GET_CODE (op0
) == CONCAT
)
2880 return gen_rtx_CONCAT (mode
, XEXP (op0
, 0),
2881 gen_rtx_NEG (GET_MODE_INNER (mode
),
2885 enum machine_mode imode
= GET_MODE_INNER (mode
);
2890 re
= adjust_address_nv (op0
, imode
, 0);
2891 im
= adjust_address_nv (op0
, imode
, GET_MODE_SIZE (imode
));
2895 enum machine_mode ifmode
= int_mode_for_mode (mode
);
2896 enum machine_mode ihmode
= int_mode_for_mode (imode
);
2898 if (ifmode
== BLKmode
|| ihmode
== BLKmode
)
2900 halfsize
= GEN_INT (GET_MODE_BITSIZE (ihmode
));
2903 re
= gen_rtx_SUBREG (ifmode
, re
, 0);
2904 re
= gen_rtx_ZERO_EXTRACT (ihmode
, re
, halfsize
, const0_rtx
);
2905 if (imode
!= ihmode
)
2906 re
= gen_rtx_SUBREG (imode
, re
, 0);
2907 im
= copy_rtx (op0
);
2909 im
= gen_rtx_SUBREG (ifmode
, im
, 0);
2910 im
= gen_rtx_ZERO_EXTRACT (ihmode
, im
, halfsize
, halfsize
);
2911 if (imode
!= ihmode
)
2912 im
= gen_rtx_SUBREG (imode
, im
, 0);
2914 im
= gen_rtx_NEG (imode
, im
);
2915 return gen_rtx_CONCAT (mode
, re
, im
);
2919 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
2920 if (!op0
|| !MEM_P (op0
))
2922 if ((TREE_CODE (TREE_OPERAND (exp
, 0)) == VAR_DECL
2923 || TREE_CODE (TREE_OPERAND (exp
, 0)) == PARM_DECL
2924 || TREE_CODE (TREE_OPERAND (exp
, 0)) == RESULT_DECL
)
2925 && !TREE_ADDRESSABLE (TREE_OPERAND (exp
, 0)))
2926 return gen_rtx_DEBUG_IMPLICIT_PTR (mode
, TREE_OPERAND (exp
, 0));
2928 if (handled_component_p (TREE_OPERAND (exp
, 0)))
2930 HOST_WIDE_INT bitoffset
, bitsize
, maxsize
;
2932 = get_ref_base_and_extent (TREE_OPERAND (exp
, 0),
2933 &bitoffset
, &bitsize
, &maxsize
);
2934 if ((TREE_CODE (decl
) == VAR_DECL
2935 || TREE_CODE (decl
) == PARM_DECL
2936 || TREE_CODE (decl
) == RESULT_DECL
)
2937 && !TREE_ADDRESSABLE (decl
)
2938 && (bitoffset
% BITS_PER_UNIT
) == 0
2940 && bitsize
== maxsize
)
2941 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode
, decl
),
2942 bitoffset
/ BITS_PER_UNIT
);
2948 op0
= convert_debug_memory_address (mode
, XEXP (op0
, 0));
2953 exp
= build_constructor_from_list (TREE_TYPE (exp
),
2954 TREE_VECTOR_CST_ELTS (exp
));
2958 if (TREE_CODE (TREE_TYPE (exp
)) == VECTOR_TYPE
)
2963 op0
= gen_rtx_CONCATN
2964 (mode
, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
))));
2966 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), i
, val
)
2968 op1
= expand_debug_expr (val
);
2971 XVECEXP (op0
, 0, i
) = op1
;
2974 if (i
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)))
2976 op1
= expand_debug_expr
2977 (fold_convert (TREE_TYPE (TREE_TYPE (exp
)), integer_zero_node
));
2982 for (; i
< TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)); i
++)
2983 XVECEXP (op0
, 0, i
) = op1
;
2989 goto flag_unsupported
;
2992 /* ??? Maybe handle some builtins? */
2997 gimple g
= get_gimple_for_ssa_name (exp
);
3000 op0
= expand_debug_expr (gimple_assign_rhs_to_tree (g
));
3006 int part
= var_to_partition (SA
.map
, exp
);
3008 if (part
== NO_PARTITION
)
3011 gcc_assert (part
>= 0 && (unsigned)part
< SA
.map
->num_partitions
);
3013 op0
= SA
.partition_to_pseudo
[part
];
3021 /* Vector stuff. For most of the codes we don't have rtl codes. */
3022 case REALIGN_LOAD_EXPR
:
3023 case REDUC_MAX_EXPR
:
3024 case REDUC_MIN_EXPR
:
3025 case REDUC_PLUS_EXPR
:
3027 case VEC_EXTRACT_EVEN_EXPR
:
3028 case VEC_EXTRACT_ODD_EXPR
:
3029 case VEC_INTERLEAVE_HIGH_EXPR
:
3030 case VEC_INTERLEAVE_LOW_EXPR
:
3031 case VEC_LSHIFT_EXPR
:
3032 case VEC_PACK_FIX_TRUNC_EXPR
:
3033 case VEC_PACK_SAT_EXPR
:
3034 case VEC_PACK_TRUNC_EXPR
:
3035 case VEC_RSHIFT_EXPR
:
3036 case VEC_UNPACK_FLOAT_HI_EXPR
:
3037 case VEC_UNPACK_FLOAT_LO_EXPR
:
3038 case VEC_UNPACK_HI_EXPR
:
3039 case VEC_UNPACK_LO_EXPR
:
3040 case VEC_WIDEN_MULT_HI_EXPR
:
3041 case VEC_WIDEN_MULT_LO_EXPR
:
3045 case ADDR_SPACE_CONVERT_EXPR
:
3046 case FIXED_CONVERT_EXPR
:
3048 case WITH_SIZE_EXPR
:
3052 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
3053 && SCALAR_INT_MODE_P (mode
))
3055 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
3056 op0
= gen_rtx_ZERO_EXTEND (mode
, op0
);
3058 op0
= gen_rtx_SIGN_EXTEND (mode
, op0
);
3059 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1))))
3060 op1
= gen_rtx_ZERO_EXTEND (mode
, op1
);
3062 op1
= gen_rtx_SIGN_EXTEND (mode
, op1
);
3063 op0
= gen_rtx_MULT (mode
, op0
, op1
);
3064 return gen_rtx_PLUS (mode
, op0
, op2
);
3068 case WIDEN_MULT_EXPR
:
3069 case WIDEN_MULT_PLUS_EXPR
:
3070 case WIDEN_MULT_MINUS_EXPR
:
3071 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
3072 && SCALAR_INT_MODE_P (mode
))
3074 enum machine_mode inner_mode
= GET_MODE (op0
);
3075 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
3076 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
3078 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
3079 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1))))
3080 op1
= simplify_gen_unary (ZERO_EXTEND
, mode
, op1
, inner_mode
);
3082 op1
= simplify_gen_unary (SIGN_EXTEND
, mode
, op1
, inner_mode
);
3083 op0
= gen_rtx_MULT (mode
, op0
, op1
);
3084 if (TREE_CODE (exp
) == WIDEN_MULT_EXPR
)
3086 else if (TREE_CODE (exp
) == WIDEN_MULT_PLUS_EXPR
)
3087 return gen_rtx_PLUS (mode
, op0
, op2
);
3089 return gen_rtx_MINUS (mode
, op2
, op0
);
3093 case WIDEN_SUM_EXPR
:
3094 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
3095 && SCALAR_INT_MODE_P (mode
))
3097 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
3098 op0
= gen_rtx_ZERO_EXTEND (mode
, op0
);
3100 op0
= gen_rtx_SIGN_EXTEND (mode
, op0
);
3101 return gen_rtx_PLUS (mode
, op0
, op1
);
3107 #ifdef ENABLE_CHECKING
3116 /* Expand the _LOCs in debug insns. We run this after expanding all
3117 regular insns, so that any variables referenced in the function
3118 will have their DECL_RTLs set. */
3121 expand_debug_locations (void)
3124 rtx last
= get_last_insn ();
3125 int save_strict_alias
= flag_strict_aliasing
;
3127 /* New alias sets while setting up memory attributes cause
3128 -fcompare-debug failures, even though it doesn't bring about any
3130 flag_strict_aliasing
= 0;
3132 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3133 if (DEBUG_INSN_P (insn
))
3135 tree value
= (tree
)INSN_VAR_LOCATION_LOC (insn
);
3137 enum machine_mode mode
;
3139 if (value
== NULL_TREE
)
3143 val
= expand_debug_expr (value
);
3144 gcc_assert (last
== get_last_insn ());
3148 val
= gen_rtx_UNKNOWN_VAR_LOC ();
3151 mode
= GET_MODE (INSN_VAR_LOCATION (insn
));
3153 gcc_assert (mode
== GET_MODE (val
)
3154 || (GET_MODE (val
) == VOIDmode
3155 && (CONST_INT_P (val
)
3156 || GET_CODE (val
) == CONST_FIXED
3157 || GET_CODE (val
) == CONST_DOUBLE
3158 || GET_CODE (val
) == LABEL_REF
)));
3161 INSN_VAR_LOCATION_LOC (insn
) = val
;
3164 flag_strict_aliasing
= save_strict_alias
;
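/* For illustration: a debug bind such as  # DEBUG x => y_1 + 1  has its
   tree location expanded here into an rtx like (plus:SI (reg) (const_int 1));
   if expansion fails, the location becomes gen_rtx_UNKNOWN_VAR_LOC (), so
   var-tracking later treats the variable as having no known location at
   that point.  */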
3167 /* Expand basic block BB from GIMPLE trees to RTL. */
3170 expand_gimple_basic_block (basic_block bb
)
3172 gimple_stmt_iterator gsi
;
3181 fprintf (dump_file
, "\n;; Generating RTL for gimple basic block %d\n",
3184 /* Note that since we are now transitioning from GIMPLE to RTL, we
3185 cannot use the gsi_*_bb() routines because they expect the basic
3186 block to be in GIMPLE, instead of RTL. Therefore, we need to
3187 access the BB sequence directly. */
3188 stmts
= bb_seq (bb
);
3189 bb
->il
.gimple
= NULL
;
3190 rtl_profile_for_bb (bb
);
3191 init_rtl_bb_info (bb
);
3192 bb
->flags
|= BB_RTL
;
  /* Remove the RETURN_EXPR if we may fall through to the exit instead.  */
3196 gsi
= gsi_last (stmts
);
3197 if (!gsi_end_p (gsi
)
3198 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
3200 gimple ret_stmt
= gsi_stmt (gsi
);
3202 gcc_assert (single_succ_p (bb
));
3203 gcc_assert (single_succ (bb
) == EXIT_BLOCK_PTR
);
3205 if (bb
->next_bb
== EXIT_BLOCK_PTR
3206 && !gimple_return_retval (ret_stmt
))
3208 gsi_remove (&gsi
, false);
3209 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
3213 gsi
= gsi_start (stmts
);
3214 if (!gsi_end_p (gsi
))
3216 stmt
= gsi_stmt (gsi
);
3217 if (gimple_code (stmt
) != GIMPLE_LABEL
)
3221 elt
= pointer_map_contains (lab_rtx_for_bb
, bb
);
3225 last
= get_last_insn ();
3229 expand_gimple_stmt (stmt
);
3234 emit_label ((rtx
) *elt
);
3236 /* Java emits line number notes in the top of labels.
3237 ??? Make this go away once line number notes are obsoleted. */
3238 BB_HEAD (bb
) = NEXT_INSN (last
);
3239 if (NOTE_P (BB_HEAD (bb
)))
3240 BB_HEAD (bb
) = NEXT_INSN (BB_HEAD (bb
));
3241 note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, BB_HEAD (bb
));
3243 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
3246 note
= BB_HEAD (bb
) = emit_note (NOTE_INSN_BASIC_BLOCK
);
3248 NOTE_BASIC_BLOCK (note
) = bb
;
3250 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
3254 stmt
= gsi_stmt (gsi
);
      /* If this statement is a non-debug one, and we generate debug
	 insns, then this one might be the last real use of a TERed
	 SSA_NAME, but where there are still some debug uses further
	 down.  Expanding the current SSA name in such further debug
	 uses by their RHS might lead to wrong debug info, as coalescing
	 might make the operands of such RHS be placed into the same
	 pseudo as something else.  Like so:
	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => a_1
	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
	 If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
	 the write to a_2 would actually have clobbered the place which
	 would be written by a_1.

	 So, instead of that, we recognize the situation, and generate
	 debug temporaries at the last real use of TERed SSA names:
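	   a_1 = a_0 + 1;
	   #DEBUG #D1 => a_1
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => #D1
	 (the temporary name #D1 is illustrative); later debug uses then
	 refer to the debug temporary instead of the TERed name.  */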
3280 if (MAY_HAVE_DEBUG_INSNS
3282 && !is_gimple_debug (stmt
))
3288 location_t sloc
= get_curr_insn_source_location ();
3289 tree sblock
= get_curr_insn_block ();
3291 /* Look for SSA names that have their last use here (TERed
3292 names always have only one real use). */
3293 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
3294 if ((def
= get_gimple_for_ssa_name (op
)))
3296 imm_use_iterator imm_iter
;
3297 use_operand_p use_p
;
3298 bool have_debug_uses
= false;
3300 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, op
)
3302 if (gimple_debug_bind_p (USE_STMT (use_p
)))
3304 have_debug_uses
= true;
3309 if (have_debug_uses
)
3311 /* OP is a TERed SSA name, with DEF it's defining
3312 statement, and where OP is used in further debug
3313 instructions. Generate a debug temporary, and
3314 replace all uses of OP in debug insns with that
3317 tree value
= gimple_assign_rhs_to_tree (def
);
3318 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
3320 enum machine_mode mode
;
3322 set_curr_insn_source_location (gimple_location (def
));
3323 set_curr_insn_block (gimple_block (def
));
3325 DECL_ARTIFICIAL (vexpr
) = 1;
3326 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
3328 mode
= DECL_MODE (value
);
3330 mode
= TYPE_MODE (TREE_TYPE (value
));
3331 DECL_MODE (vexpr
) = mode
;
3333 val
= gen_rtx_VAR_LOCATION
3334 (mode
, vexpr
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
3336 val
= emit_debug_insn (val
);
3338 FOR_EACH_IMM_USE_STMT (debugstmt
, imm_iter
, op
)
3340 if (!gimple_debug_bind_p (debugstmt
))
3343 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
3344 SET_USE (use_p
, vexpr
);
3346 update_stmt (debugstmt
);
3350 set_curr_insn_source_location (sloc
);
3351 set_curr_insn_block (sblock
);
3354 currently_expanding_gimple_stmt
= stmt
;
3356 /* Expand this statement, then evaluate the resulting RTL and
3357 fixup the CFG accordingly. */
3358 if (gimple_code (stmt
) == GIMPLE_COND
)
3360 new_bb
= expand_gimple_cond (bb
, stmt
);
3364 else if (gimple_debug_bind_p (stmt
))
3366 location_t sloc
= get_curr_insn_source_location ();
3367 tree sblock
= get_curr_insn_block ();
3368 gimple_stmt_iterator nsi
= gsi
;
3372 tree var
= gimple_debug_bind_get_var (stmt
);
3375 enum machine_mode mode
;
3377 if (gimple_debug_bind_has_value_p (stmt
))
3378 value
= gimple_debug_bind_get_value (stmt
);
3382 last
= get_last_insn ();
3384 set_curr_insn_source_location (gimple_location (stmt
));
3385 set_curr_insn_block (gimple_block (stmt
));
3388 mode
= DECL_MODE (var
);
3390 mode
= TYPE_MODE (TREE_TYPE (var
));
3392 val
= gen_rtx_VAR_LOCATION
3393 (mode
, var
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
3395 val
= emit_debug_insn (val
);
3397 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
		  /* We can't dump the insn with a TREE where an RTX is expected.  */
3401 INSN_VAR_LOCATION_LOC (val
) = const0_rtx
;
3402 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
3403 INSN_VAR_LOCATION_LOC (val
) = (rtx
)value
;
3406 /* In order not to generate too many debug temporaries,
3407 we delink all uses of debug statements we already expanded.
3408 Therefore debug statements between definition and real
3409 use of TERed SSA names will continue to use the SSA name,
3410 and not be replaced with debug temps. */
3411 delink_stmt_imm_use (stmt
);
3415 if (gsi_end_p (nsi
))
3417 stmt
= gsi_stmt (nsi
);
3418 if (!gimple_debug_bind_p (stmt
))
3422 set_curr_insn_source_location (sloc
);
3423 set_curr_insn_block (sblock
);
3427 if (is_gimple_call (stmt
) && gimple_call_tail_p (stmt
))
3430 new_bb
= expand_gimple_tailcall (bb
, stmt
, &can_fallthru
);
3441 def_operand_p def_p
;
3442 def_p
= SINGLE_SSA_DEF_OPERAND (stmt
, SSA_OP_DEF
);
3446 /* Ignore this stmt if it is in the list of
3447 replaceable expressions. */
3449 && bitmap_bit_p (SA
.values
,
3450 SSA_NAME_VERSION (DEF_FROM_PTR (def_p
))))
3453 last
= expand_gimple_stmt (stmt
);
3454 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
3459 currently_expanding_gimple_stmt
= NULL
;
3461 /* Expand implicit goto and convert goto_locus. */
3462 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3464 if (e
->goto_locus
&& e
->goto_block
)
3466 set_curr_insn_source_location (e
->goto_locus
);
3467 set_curr_insn_block (e
->goto_block
);
3468 e
->goto_locus
= curr_insn_locator ();
3470 e
->goto_block
= NULL
;
3471 if ((e
->flags
& EDGE_FALLTHRU
) && e
->dest
!= bb
->next_bb
)
3473 emit_jump (label_rtx_for_bb (e
->dest
));
3474 e
->flags
&= ~EDGE_FALLTHRU
;
  /* Expanded RTL can create a jump in the last instruction of the block.
     This jump might later be assumed to be a jump to the successor and
     break edge insertion.  We need to insert a dummy move to prevent
     this.  PR41440.  */
3481 if (single_succ_p (bb
)
3482 && (single_succ_edge (bb
)->flags
& EDGE_FALLTHRU
)
3483 && (last
= get_last_insn ())
3486 rtx dummy
= gen_reg_rtx (SImode
);
3487 emit_insn_after_noloc (gen_move_insn (dummy
, dummy
), last
, NULL
);
3490 do_pending_stack_adjust ();
3492 /* Find the block tail. The last insn in the block is the insn
3493 before a barrier and/or table jump insn. */
3494 last
= get_last_insn ();
3495 if (BARRIER_P (last
))
3496 last
= PREV_INSN (last
);
3497 if (JUMP_TABLE_DATA_P (last
))
3498 last
= PREV_INSN (PREV_INSN (last
));
3501 update_bb_for_insn (bb
);
3507 /* Create a basic block for initialization code. */
3510 construct_init_block (void)
3512 basic_block init_block
, first_block
;
3516 /* Multiple entry points not supported yet. */
3517 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR
->succs
) == 1);
3518 init_rtl_bb_info (ENTRY_BLOCK_PTR
);
3519 init_rtl_bb_info (EXIT_BLOCK_PTR
);
3520 ENTRY_BLOCK_PTR
->flags
|= BB_RTL
;
3521 EXIT_BLOCK_PTR
->flags
|= BB_RTL
;
3523 e
= EDGE_SUCC (ENTRY_BLOCK_PTR
, 0);
  /* When the entry edge points to the first basic block, we don't need
     a jump; otherwise we have to jump into the proper target.  */
3527 if (e
&& e
->dest
!= ENTRY_BLOCK_PTR
->next_bb
)
3529 tree label
= gimple_block_label (e
->dest
);
3531 emit_jump (label_rtx (label
));
3535 flags
= EDGE_FALLTHRU
;
3537 init_block
= create_basic_block (NEXT_INSN (get_insns ()),
3540 init_block
->frequency
= ENTRY_BLOCK_PTR
->frequency
;
3541 init_block
->count
= ENTRY_BLOCK_PTR
->count
;
3544 first_block
= e
->dest
;
3545 redirect_edge_succ (e
, init_block
);
3546 e
= make_edge (init_block
, first_block
, flags
);
3549 e
= make_edge (init_block
, EXIT_BLOCK_PTR
, EDGE_FALLTHRU
);
3550 e
->probability
= REG_BR_PROB_BASE
;
3551 e
->count
= ENTRY_BLOCK_PTR
->count
;
3553 update_bb_for_insn (init_block
);
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
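/* For example (illustrative): for a function body
     { int a;  { int b; }  { int c; } }
   the outermost BLOCK gets level 0 and both inner BLOCKs get level 1,
   which later lets change_scope find the common parent cheaply.  */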
3571 /* Create a block containing landing pads and similar stuff. */
3574 construct_exit_block (void)
3576 rtx head
= get_last_insn ();
3578 basic_block exit_block
;
3582 rtx orig_end
= BB_END (EXIT_BLOCK_PTR
->prev_bb
);
3584 rtl_profile_for_bb (EXIT_BLOCK_PTR
);
3586 /* Make sure the locus is set to the end of the function, so that
3587 epilogue line numbers and warnings are set properly. */
3588 if (cfun
->function_end_locus
!= UNKNOWN_LOCATION
)
3589 input_location
= cfun
->function_end_locus
;
3591 /* The following insns belong to the top scope. */
3592 set_curr_insn_block (DECL_INITIAL (current_function_decl
));
3594 /* Generate rtl for function exit. */
3595 expand_function_end ();
3597 end
= get_last_insn ();
  /* While emitting the function end we could have moved the end of the
     last basic block; restore it.  */
3602 BB_END (EXIT_BLOCK_PTR
->prev_bb
) = orig_end
;
3603 while (NEXT_INSN (head
) && NOTE_P (NEXT_INSN (head
)))
3604 head
= NEXT_INSN (head
);
3605 exit_block
= create_basic_block (NEXT_INSN (head
), end
,
3606 EXIT_BLOCK_PTR
->prev_bb
);
3607 exit_block
->frequency
= EXIT_BLOCK_PTR
->frequency
;
3608 exit_block
->count
= EXIT_BLOCK_PTR
->count
;
3611 while (ix
< EDGE_COUNT (EXIT_BLOCK_PTR
->preds
))
3613 e
= EDGE_PRED (EXIT_BLOCK_PTR
, ix
);
3614 if (!(e
->flags
& EDGE_ABNORMAL
))
3615 redirect_edge_succ (e
, exit_block
);
3620 e
= make_edge (exit_block
, EXIT_BLOCK_PTR
, EDGE_FALLTHRU
);
3621 e
->probability
= REG_BR_PROB_BASE
;
3622 e
->count
= EXIT_BLOCK_PTR
->count
;
3623 FOR_EACH_EDGE (e2
, ei
, EXIT_BLOCK_PTR
->preds
)
3626 e
->count
-= e2
->count
;
3627 exit_block
->count
-= e2
->count
;
3628 exit_block
->frequency
-= EDGE_FREQUENCY (e2
);
3632 if (exit_block
->count
< 0)
3633 exit_block
->count
= 0;
3634 if (exit_block
->frequency
< 0)
3635 exit_block
->frequency
= 0;
3636 update_bb_for_insn (exit_block
);
3639 /* Helper function for discover_nonconstant_array_refs.
3640 Look for ARRAY_REF nodes with non-constant indexes and mark them
3644 discover_nonconstant_array_refs_r (tree
* tp
, int *walk_subtrees
,
3645 void *data ATTRIBUTE_UNUSED
)
3649 if (IS_TYPE_OR_DECL_P (t
))
3651 else if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3653 while (((TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3654 && is_gimple_min_invariant (TREE_OPERAND (t
, 1))
3655 && (!TREE_OPERAND (t
, 2)
3656 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
3657 || (TREE_CODE (t
) == COMPONENT_REF
3658 && (!TREE_OPERAND (t
,2)
3659 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
3660 || TREE_CODE (t
) == BIT_FIELD_REF
3661 || TREE_CODE (t
) == REALPART_EXPR
3662 || TREE_CODE (t
) == IMAGPART_EXPR
3663 || TREE_CODE (t
) == VIEW_CONVERT_EXPR
3664 || CONVERT_EXPR_P (t
))
3665 t
= TREE_OPERAND (t
, 0);
3667 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
3669 t
= get_base_address (t
);
3671 && DECL_MODE (t
) != BLKmode
)
3672 TREE_ADDRESSABLE (t
) = 1;
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
3687 discover_nonconstant_array_refs (void)
3690 gimple_stmt_iterator gsi
;
3693 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3695 gimple stmt
= gsi_stmt (gsi
);
3696 if (!is_gimple_debug (stmt
))
3697 walk_gimple_op (stmt
, discover_nonconstant_array_refs_r
, NULL
);
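/* For example (illustrative): given
     int v[2];
     ... = v[i];
   with a non-constant index I, V could otherwise be allocated to a
   single register; marking it TREE_ADDRESSABLE forces a stack slot so
   the variable-offset access can be expanded.  */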
3701 /* This function sets crtl->args.internal_arg_pointer to a virtual
3702 register if DRAP is needed. Local register allocator will replace
3703 virtual_incoming_args_rtx with the virtual register. */
3706 expand_stack_alignment (void)
3709 unsigned int preferred_stack_boundary
;
3711 if (! SUPPORTS_STACK_ALIGNMENT
)
3714 if (cfun
->calls_alloca
3715 || cfun
->has_nonlocal_label
3716 || crtl
->has_nonlocal_goto
)
3717 crtl
->need_drap
= true;
3719 /* Call update_stack_boundary here again to update incoming stack
3720 boundary. It may set incoming stack alignment to a different
3721 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3722 use the minimum incoming stack alignment to check if it is OK
3723 to perform sibcall optimization since sibcall optimization will
3724 only align the outgoing stack to incoming stack boundary. */
3725 if (targetm
.calls
.update_stack_boundary
)
3726 targetm
.calls
.update_stack_boundary ();
3728 /* The incoming stack frame has to be aligned at least at
3729 parm_stack_boundary. */
3730 gcc_assert (crtl
->parm_stack_boundary
<= INCOMING_STACK_BOUNDARY
);
3732 /* Update crtl->stack_alignment_estimated and use it later to align
3733 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3734 exceptions since callgraph doesn't collect incoming stack alignment
3736 if (cfun
->can_throw_non_call_exceptions
3737 && PREFERRED_STACK_BOUNDARY
> crtl
->preferred_stack_boundary
)
3738 preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
3740 preferred_stack_boundary
= crtl
->preferred_stack_boundary
;
3741 if (preferred_stack_boundary
> crtl
->stack_alignment_estimated
)
3742 crtl
->stack_alignment_estimated
= preferred_stack_boundary
;
3743 if (preferred_stack_boundary
> crtl
->stack_alignment_needed
)
3744 crtl
->stack_alignment_needed
= preferred_stack_boundary
;
3746 gcc_assert (crtl
->stack_alignment_needed
3747 <= crtl
->stack_alignment_estimated
);
3749 crtl
->stack_realign_needed
3750 = INCOMING_STACK_BOUNDARY
< crtl
->stack_alignment_estimated
;
3751 crtl
->stack_realign_tried
= crtl
->stack_realign_needed
;
3753 crtl
->stack_realign_processed
= true;
3755 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3757 gcc_assert (targetm
.calls
.get_drap_rtx
!= NULL
);
3758 drap_rtx
= targetm
.calls
.get_drap_rtx ();
3760 /* stack_realign_drap and drap_rtx must match. */
3761 gcc_assert ((stack_realign_drap
!= 0) == (drap_rtx
!= NULL
));
3763 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3764 if (NULL
!= drap_rtx
)
3766 crtl
->args
.internal_arg_pointer
= drap_rtx
;
3768 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3770 fixup_tail_calls ();
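/* An illustrative note: a function that calls alloca or uses nonlocal
   gotos sets crtl->need_drap above; when the incoming stack boundary is
   smaller than the estimated alignment, stack realignment is requested,
   and if the target's get_drap_rtx hook returns a register, that DRAP
   register becomes crtl->args.internal_arg_pointer so incoming arguments
   stay addressable after realignment.  */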
3774 /* Translate the intermediate representation contained in the CFG
3775 from GIMPLE trees to RTL.
3777 We do conversion per basic block and preserve/update the tree CFG.
3778 This implies we have to do some magic as the CFG can simultaneously
3779 consist of basic blocks containing RTL and GIMPLE trees. This can
3780 confuse the CFG hooks, so be careful to not manipulate CFG during
3784 gimple_expand_cfg (void)
3786 basic_block bb
, init_block
;
3792 timevar_push (TV_OUT_OF_SSA
);
3793 rewrite_out_of_ssa (&SA
);
3794 timevar_pop (TV_OUT_OF_SSA
);
3795 SA
.partition_to_pseudo
= (rtx
*)xcalloc (SA
.map
->num_partitions
,
3798 /* Some backends want to know that we are expanding to RTL. */
3799 currently_expanding_to_rtl
= 1;
3801 rtl_profile_for_bb (ENTRY_BLOCK_PTR
);
3803 insn_locators_alloc ();
3804 if (!DECL_IS_BUILTIN (current_function_decl
))
3806 /* Eventually, all FEs should explicitly set function_start_locus. */
3807 if (cfun
->function_start_locus
== UNKNOWN_LOCATION
)
3808 set_curr_insn_source_location
3809 (DECL_SOURCE_LOCATION (current_function_decl
));
3811 set_curr_insn_source_location (cfun
->function_start_locus
);
3813 set_curr_insn_block (DECL_INITIAL (current_function_decl
));
3814 prologue_locator
= curr_insn_locator ();
3816 #ifdef INSN_SCHEDULING
3817 init_sched_attrs ();
3820 /* Make sure first insn is a note even if we don't want linenums.
3821 This makes sure the first insn will never be deleted.
3822 Also, final expects a note to appear there. */
3823 emit_note (NOTE_INSN_DELETED
);
3825 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3826 discover_nonconstant_array_refs ();
3828 targetm
.expand_to_rtl_hook ();
3829 crtl
->stack_alignment_needed
= STACK_BOUNDARY
;
3830 crtl
->max_used_stack_slot_alignment
= STACK_BOUNDARY
;
3831 crtl
->stack_alignment_estimated
= 0;
3832 crtl
->preferred_stack_boundary
= STACK_BOUNDARY
;
3833 cfun
->cfg
->max_jumptable_ents
= 0;
3836 /* Expand the variables recorded during gimple lowering. */
3837 timevar_push (TV_VAR_EXPAND
);
3838 expand_used_vars ();
3839 timevar_pop (TV_VAR_EXPAND
);
3841 /* Honor stack protection warnings. */
3842 if (warn_stack_protect
)
3844 if (cfun
->calls_alloca
)
3845 warning (OPT_Wstack_protector
,
3846 "stack protector not protecting local variables: "
3847 "variable length buffer");
3848 if (has_short_buffer
&& !crtl
->stack_protect_guard
)
3849 warning (OPT_Wstack_protector
,
3850 "stack protector not protecting function: "
3851 "all local arrays are less than %d bytes long",
3852 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE
));
3855 /* Set up parameters and prepare for return, for the function. */
3856 expand_function_start (current_function_decl
);
3858 /* Now that we also have the parameter RTXs, copy them over to our
3860 for (i
= 0; i
< SA
.map
->num_partitions
; i
++)
3862 tree var
= SSA_NAME_VAR (partition_to_var (SA
.map
, i
));
3864 if (TREE_CODE (var
) != VAR_DECL
3865 && !SA
.partition_to_pseudo
[i
])
3866 SA
.partition_to_pseudo
[i
] = DECL_RTL_IF_SET (var
);
3867 gcc_assert (SA
.partition_to_pseudo
[i
]);
3869 /* If this decl was marked as living in multiple places, reset
3870 this now to NULL. */
3871 if (DECL_RTL_IF_SET (var
) == pc_rtx
)
3872 SET_DECL_RTL (var
, NULL
);
3874 /* Some RTL parts really want to look at DECL_RTL(x) when x
3875 was a decl marked in REG_ATTR or MEM_ATTR. We could use
3876 SET_DECL_RTL here making this available, but that would mean
3877 to select one of the potentially many RTLs for one DECL. Instead
3878 of doing that we simply reset the MEM_EXPR of the RTL in question,
3879 then nobody can get at it and hence nobody can call DECL_RTL on it. */
3880 if (!DECL_RTL_SET_P (var
))
3882 if (MEM_P (SA
.partition_to_pseudo
[i
]))
3883 set_mem_expr (SA
.partition_to_pseudo
[i
], NULL
);
3887 /* If this function is `main', emit a call to `__main'
3888 to run global initializers, etc. */
3889 if (DECL_NAME (current_function_decl
)
3890 && MAIN_NAME_P (DECL_NAME (current_function_decl
))
3891 && DECL_FILE_SCOPE_P (current_function_decl
))
3892 expand_main_function ();
3894 /* Initialize the stack_protect_guard field. This must happen after the
3895 call to __main (if any) so that the external decl is initialized. */
3896 if (crtl
->stack_protect_guard
)
3897 stack_protect_prologue ();
3899 expand_phi_nodes (&SA
);
3901 /* Register rtl specific functions for cfg. */
3902 rtl_register_cfg_hooks ();
3904 init_block
= construct_init_block ();
3906 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
3907 remaining edges later. */
3908 FOR_EACH_EDGE (e
, ei
, ENTRY_BLOCK_PTR
->succs
)
3909 e
->flags
&= ~EDGE_EXECUTABLE
;
3911 lab_rtx_for_bb
= pointer_map_create ();
3912 FOR_BB_BETWEEN (bb
, init_block
->next_bb
, EXIT_BLOCK_PTR
, next_bb
)
3913 bb
= expand_gimple_basic_block (bb
);
3915 if (MAY_HAVE_DEBUG_INSNS
)
3916 expand_debug_locations ();
3918 execute_free_datastructures ();
3919 timevar_push (TV_OUT_OF_SSA
);
3920 finish_out_of_ssa (&SA
);
3921 timevar_pop (TV_OUT_OF_SSA
);
3923 timevar_push (TV_POST_EXPAND
);
3924 /* We are no longer in SSA form. */
3925 cfun
->gimple_df
->in_ssa_p
= false;
3927 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
3928 conservatively to true until they are all profile aware. */
3929 pointer_map_destroy (lab_rtx_for_bb
);
3932 construct_exit_block ();
3933 set_curr_insn_block (DECL_INITIAL (current_function_decl
));
3934 insn_locators_finalize ();
3936 /* Zap the tree EH table. */
3937 set_eh_throw_stmt_table (cfun
, NULL
);
3939 rebuild_jump_labels (get_insns ());
3941 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
, EXIT_BLOCK_PTR
, next_bb
)
3945 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3948 commit_one_edge_insertion (e
);
3954 /* We're done expanding trees to RTL. */
3955 currently_expanding_to_rtl
= 0;
3957 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
->next_bb
, EXIT_BLOCK_PTR
, next_bb
)
3961 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3963 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
3964 e
->flags
&= ~EDGE_EXECUTABLE
;
3966 /* At the moment not all abnormal edges match the RTL
3967 representation. It is safe to remove them here as
3968 find_many_sub_basic_blocks will rediscover them.
3969 In the future we should get this fixed properly. */
3970 if ((e
->flags
& EDGE_ABNORMAL
)
3971 && !(e
->flags
& EDGE_SIBCALL
))
3978 blocks
= sbitmap_alloc (last_basic_block
);
3979 sbitmap_ones (blocks
);
3980 find_many_sub_basic_blocks (blocks
);
3981 sbitmap_free (blocks
);
3982 purge_all_dead_edges ();
3986 expand_stack_alignment ();
3988 #ifdef ENABLE_CHECKING
3989 verify_flow_info ();
3992 /* There's no need to defer outputting this function any more; we
3993 know we want to output it. */
3994 DECL_DEFER_OUTPUT (current_function_decl
) = 0;
3996 /* Now that we're done expanding trees to RTL, we shouldn't have any
3997 more CONCATs anywhere. */
3998 generating_concat_p
= 0;
4003 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4004 /* And the pass manager will dump RTL for us. */
4007 /* If we're emitting a nested function, make sure its parent gets
4008 emitted as well. Doing otherwise confuses debug info. */
4011 for (parent
= DECL_CONTEXT (current_function_decl
);
4012 parent
!= NULL_TREE
;
4013 parent
= get_containing_scope (parent
))
4014 if (TREE_CODE (parent
) == FUNCTION_DECL
)
4015 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent
)) = 1;
4018 /* We are now committed to emitting code for this function. Do any
4019 preparation, such as emitting abstract debug info for the inline
4020 before it gets mangled by optimization. */
4021 if (cgraph_function_possibly_inlined_p (current_function_decl
))
4022 (*debug_hooks
->outlining_inline_function
) (current_function_decl
);
4024 TREE_ASM_WRITTEN (current_function_decl
) = 1;
4026 /* After expanding, the return labels are no longer needed. */
4027 return_label
= NULL
;
4028 naked_return_label
= NULL
;
4029 /* Tag the blocks with a depth number so that change_scope can find
4030 the common parent easily. */
4031 set_block_levels (DECL_INITIAL (cfun
->decl
), 0);
4032 default_rtl_profile ();
4033 timevar_pop (TV_POST_EXPAND
);
4037 struct rtl_opt_pass pass_expand
=
4041 "expand", /* name */
4043 gimple_expand_cfg
, /* execute */
4046 0, /* static_pass_number */
4047 TV_EXPAND
, /* tv_id */
4048 PROP_ssa
| PROP_gimple_leh
| PROP_cfg
4049 | PROP_gimple_lcx
, /* properties_required */
4050 PROP_rtl
, /* properties_provided */
4051 PROP_ssa
| PROP_trees
, /* properties_destroyed */
4052 TODO_verify_ssa
| TODO_verify_flow
4053 | TODO_verify_stmts
, /* todo_flags_start */
4055 | TODO_ggc_collect
/* todo_flags_finish */