/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "basic-block.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
33 #include "timevar.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "except.h"
37 #include "flags.h"
38 #include "diagnostic.h"
39 #include "toplev.h"
40 #include "debug.h"
41 #include "params.h"
42 #include "tree-inline.h"
43 #include "value-prof.h"
44 #include "target.h"
45 #include "ssaexpand.h"
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
          && gimple_location (stmt) != EXPR_LOCATION (t))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
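
/* For example (illustrative): the GIMPLE assignment x = y + z has a
   GIMPLE_BINARY_RHS, so this function returns the tree
   PLUS_EXPR <y, z> built with the type of x.  */
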
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an SSA
   name, this is exactly SET_DECL_RTL; otherwise make the partition
   of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};
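
/* Sentinel marking the end of a partition's chain of stack variables.  */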
#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore any alignment
   we can't satisfy given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}
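
/* For example (illustrative): on a byte-addressable target, a decl whose
   LOCAL_DECL_ALIGNMENT is 64 bits comes back from this function as 8.  */
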
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
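
/* Worked example (illustrative): on a FRAME_GROWS_DOWNWARD target with
   frame_phase == 0, frame_offset == -20, SIZE == 12 and ALIGN == 8,
   new_frame_offset becomes -20 - 12 == -32, which -32 & -8 leaves
   unchanged, so the slot is placed at offset -32 and frame_offset
   advances (downward) to -32.  */
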
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size
    = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union-containing structures.  Else RTL alias analysis comes along
   and due to type-based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use the id of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (uid));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);
      add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/
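
/* Worked example (illustrative): four mutually non-conflicting objects of
   sizes 32, 16, 8 and 8 (their lifetimes never overlap, so they may share
   storage) start from the 32-byte object: S = 32, O = 0.  The 16-byte
   object lands at offset 0, the 8-byte objects at offsets 16 and 24, and
   the whole partition occupies 32 bytes instead of 64.  */
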
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);
          offset += jsize;
          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  if (optimize)
    update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
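/* Sample output (illustrative, following the format strings below):
        Partition 0: size 32 align 16
                a, offset 0
                b, offset 16  */
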
static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
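      /* OFFSET & -OFFSET isolates the lowest set bit of OFFSET, i.e. the
         largest power of two dividing it, which is the strongest alignment
         this frame position can guarantee.  */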
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */
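
/* For illustration: N deferred variables can require up to N*(N-1)/2
   pairwise conflict checks, so every variable allocated immediately
   shrinks the partitioning work quadratically rather than linearly.  */
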
static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the amount of stack space this variable is supposed to use.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after the stack
             realign decision is made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY 1
#define SPCT_HAS_SMALL_CHAR_ARRAY 2
#define SPCT_HAS_ARRAY 4
#define SPCT_HAS_AGGREGATE 8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
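
/* For example (illustrative): with the default --param ssp-buffer-size=8,
   a local "char buf[64]" classifies as
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, while "char buf[4]" yields
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY.  */
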
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree,
   summing the stack usage of each used variable without actually
   expanding any of them.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Account all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Account all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, next, outer_block = DECL_INITIAL (current_function_decl);
  tree maybe_local_decls = NULL_TREE;
  unsigned i;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            {
              TREE_CHAIN (t) = cfun->local_decls;
              cfun->local_decls = t;
              continue;
            }
          else if (rtl == NULL_RTX)
            {
              /* If rtl isn't set yet, which can happen e.g. with
                 -fstack-protector, retry before returning from this
                 function.  */
              TREE_CHAIN (t) = maybe_local_decls;
              maybe_local_decls = t;
              continue;
            }
        }

      ggc_free (t);
    }

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  for (t = maybe_local_decls; t; t = next)
    {
      tree var = TREE_VALUE (t);
      rtx rtl = DECL_RTL_IF_SET (var);

      next = TREE_CHAIN (t);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        {
          TREE_CHAIN (t) = cfun->local_decls;
          cfun->local_decls = t;
          continue;
        }

      ggc_free (t);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
        }
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}

/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp;
  tree lhs = gimple_call_lhs (stmt);
  size_t i;
  bool builtin_p;
  tree decl;

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
}

/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple stmt)
{
  tree op0;
  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
        expand_goto (op0);
      else
        expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (stmt));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_SWITCH:
      expand_case (stmt);
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (stmt);
      break;
    case GIMPLE_CALL:
      expand_call_stmt (stmt);
      break;

    case GIMPLE_RETURN:
      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)
        {
          tree result = DECL_RESULT (current_function_decl);

          /* If we are not returning the current function's RESULT_DECL,
             build an assignment to it.  */
          if (op0 != result)
            {
              /* I believe that a function's RESULT_DECL is unique.  */
              gcc_assert (TREE_CODE (op0) != RESULT_DECL);

              /* ??? We'd like to use simply expand_assignment here,
                 but this fails if the value is of BLKmode but the return
                 decl is a register.  expand_return has special handling
                 for this combination, which eventually should move
                 to common code.  See comments there.  Until then, let's
                 build a modify expression :-/  */
              op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
                            result, op0);
            }
        }
      if (!op0)
        expand_null_return ();
      else
        expand_return (op0);
      break;

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple, the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
          {
            tree rhs = gimple_assign_rhs1 (stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            expand_assignment (lhs, rhs,
                               gimple_assign_nontemporal_move_p (stmt));
          }
        else
          {
            rtx target, temp;
            bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
              promoted = true;

            ops.code = gimple_assign_rhs_code (stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
              {
              case GIMPLE_BINARY_RHS:
                ops.op1 = gimple_assign_rhs2 (stmt);
                /* Fallthru */
              case GIMPLE_UNARY_RHS:
                ops.op0 = gimple_assign_rhs1 (stmt);
                break;
              default:
                gcc_unreachable ();
              }
            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value to
               register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
                                       EXPAND_NORMAL);

            if (temp == target)
              ;
            else if (promoted)
              {
                int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                  {
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),
                                          temp, unsignedp);
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                          GET_MODE (target), temp, unsignedp);
                  }

                convert_move (SUBREG_REG (target), temp, unsignedp);
              }
            else if (nontemporal && emit_storent_insn (target, temp))
              ;
            else
              {
                temp = force_operand (temp, target);
                if (temp != target)
                  emit_move_insn (target, temp);
              }
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
}

1937 /* Expand one gimple statement STMT and return the last RTL instruction
1938 before any of the newly generated ones.
1940 In addition to generating the necessary RTL instructions this also
1941 sets REG_EH_REGION notes if necessary and sets the current source
1942 location for diagnostics. */
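/* Typical use, as in expand_gimple_basic_block further below:

     last = expand_gimple_stmt (stmt);
     maybe_dump_rtl_for_gimple_stmt (stmt, last);

   The returned insn marks the point just before STMT's RTL, so callers
   can dump or post-process exactly the insns generated for STMT.  */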
1944 static rtx
1945 expand_gimple_stmt (gimple stmt)
1947 int lp_nr = 0;
1948 rtx last = NULL;
1949 location_t saved_location = input_location;
1951 last = get_last_insn ();
1953 /* If this is an expression of some kind and it has an associated line
1954 number, then emit the line number before expanding the expression.
1956 We need to save and restore the file and line information so that
1957 errors discovered during expansion are emitted with the right
1958      information. It would be better if the diagnostic routines
1959 used the file/line information embedded in the tree nodes rather
1960 than globals. */
1961 gcc_assert (cfun);
1963 if (gimple_has_location (stmt))
1965 input_location = gimple_location (stmt);
1966 set_curr_insn_source_location (input_location);
1968 /* Record where the insns produced belong. */
1969 set_curr_insn_block (gimple_block (stmt));
1972 expand_gimple_stmt_1 (stmt);
1973 /* Free any temporaries used to evaluate this statement. */
1974 free_temp_slots ();
1976 input_location = saved_location;
1978 /* Mark all insns that may trap. */
1979 lp_nr = lookup_stmt_eh_lp (stmt);
1980 if (lp_nr)
1982 rtx insn;
1983 for (insn = next_real_insn (last); insn;
1984 insn = next_real_insn (insn))
1986 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1987 /* If we want exceptions for non-call insns, any
1988 may_trap_p instruction may throw. */
1989 && GET_CODE (PATTERN (insn)) != CLOBBER
1990 && GET_CODE (PATTERN (insn)) != USE
1991 && insn_could_throw_p (insn))
1992 make_reg_eh_region_note (insn, 0, lp_nr);
1996 return last;
1999 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2000 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2001 generated a tail call (something that might be denied by the ABI
2002 rules governing the call; see calls.c).
2004    Sets CAN_FALLTHRU if we generated a *conditional* tail call, so that
2005    control can still reach the rest of BB. The case here is __builtin_sqrt,
2006 where the NaN result goes through the external function (with a
2007 tailcall) and the normal result happens via a sqrt instruction. */
2009 static basic_block
2010 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2012 rtx last2, last;
2013 edge e;
2014 edge_iterator ei;
2015 int probability;
2016 gcov_type count;
2018 last2 = last = expand_gimple_stmt (stmt);
2020 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2021 if (CALL_P (last) && SIBLING_CALL_P (last))
2022 goto found;
2024 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2026 *can_fallthru = true;
2027 return NULL;
2029 found:
2030 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2031 Any instructions emitted here are about to be deleted. */
2032 do_pending_stack_adjust ();
2034 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2035 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2036 EH or abnormal edges, we shouldn't have created a tail call in
2037 the first place. So it seems to me we should just be removing
2038 all edges here, or redirecting the existing fallthru edge to
2039 the exit block. */
2041 probability = 0;
2042 count = 0;
2044 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2046 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2048 if (e->dest != EXIT_BLOCK_PTR)
2050 e->dest->count -= e->count;
2051 e->dest->frequency -= EDGE_FREQUENCY (e);
2052 if (e->dest->count < 0)
2053 e->dest->count = 0;
2054 if (e->dest->frequency < 0)
2055 e->dest->frequency = 0;
2057 count += e->count;
2058 probability += e->probability;
2059 remove_edge (e);
2061 else
2062 ei_next (&ei);
2065 /* This is somewhat ugly: the call_expr expander often emits instructions
2066 after the sibcall (to perform the function return). These confuse the
2067      find_many_sub_basic_blocks code, so we need to get rid of them. */
2068 last = NEXT_INSN (last);
2069 gcc_assert (BARRIER_P (last));
2071 *can_fallthru = false;
2072 while (NEXT_INSN (last))
2074       /* For instance, the sqrt builtin expander expands an if-then-else, with
2075          the sibcall in the then-arm and a label for the else-arm. */
2076 if (LABEL_P (NEXT_INSN (last)))
2078 *can_fallthru = true;
2079 break;
2081 delete_insn (NEXT_INSN (last));
2084 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2085 e->probability += probability;
2086 e->count += count;
2087 BB_END (bb) = last;
2088 update_bb_for_insn (bb);
2090 if (NEXT_INSN (last))
2092 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2094 last = BB_END (bb);
2095 if (BARRIER_P (last))
2096 BB_END (bb) = PREV_INSN (last);
2099 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2101 return bb;
2104 /* Return the difference between the floor and the truncated result of
2105 a signed division by OP1 with remainder MOD. */
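/* Worked example: -7 / 2 truncates to -3 with remainder -1, while floor
   division yields -4. MOD is nonzero and OP1 / MOD (2 / -1) is negative,
   so the adjustment below is -1, and -3 + -1 == -4 as required.  */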
2106 static rtx
2107 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2109 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2110 return gen_rtx_IF_THEN_ELSE
2111 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2112 gen_rtx_IF_THEN_ELSE
2113 (mode, gen_rtx_LT (BImode,
2114 gen_rtx_DIV (mode, op1, mod),
2115 const0_rtx),
2116 constm1_rtx, const0_rtx),
2117 const0_rtx);
2120 /* Return the difference between the ceil and the truncated result of
2121 a signed division by OP1 with remainder MOD. */
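/* Worked example: 7 / 2 truncates to 3 with remainder 1, while ceiling
   division yields 4. MOD is nonzero and OP1 / MOD (2 / 1) is positive,
   so the adjustment below is 1.  */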
2122 static rtx
2123 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2125 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2126 return gen_rtx_IF_THEN_ELSE
2127 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2128 gen_rtx_IF_THEN_ELSE
2129 (mode, gen_rtx_GT (BImode,
2130 gen_rtx_DIV (mode, op1, mod),
2131 const0_rtx),
2132 const1_rtx, const0_rtx),
2133 const0_rtx);
2136 /* Return the difference between the ceil and the truncated result of
2137 an unsigned division by OP1 with remainder MOD. */
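/* Worked example: 7u / 2u truncates to 3 with remainder 1; any nonzero
   remainder means the ceiling is one above the truncated quotient, so
   the adjustment below is simply (mod != 0).  */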
2138 static rtx
2139 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2141 /* (mod != 0 ? 1 : 0) */
2142 return gen_rtx_IF_THEN_ELSE
2143 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2144 const1_rtx, const0_rtx);
2147 /* Return the difference between the rounded and the truncated result
2148 of a signed division by OP1 with remainder MOD. Halfway cases are
2149 rounded away from zero, rather than to the nearest even number. */
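/* Worked example: -7 / 2 truncates to -3 with remainder -1. Here
   abs (mod) == 1 equals abs (op1) - abs (mod) == 1, and OP1 / MOD is
   negative, so the adjustment is -1: -3.5 rounds away from zero to -4.
   For 7 / 3 (remainder 1) the test fails and the adjustment is 0.  */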
2150 static rtx
2151 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2153 /* (abs (mod) >= abs (op1) - abs (mod)
2154 ? (op1 / mod > 0 ? 1 : -1)
2155 : 0) */
2156 return gen_rtx_IF_THEN_ELSE
2157 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2158 gen_rtx_MINUS (mode,
2159 gen_rtx_ABS (mode, op1),
2160 gen_rtx_ABS (mode, mod))),
2161 gen_rtx_IF_THEN_ELSE
2162 (mode, gen_rtx_GT (BImode,
2163 gen_rtx_DIV (mode, op1, mod),
2164 const0_rtx),
2165 const1_rtx, constm1_rtx),
2166 const0_rtx);
2169 /* Return the difference between the rounded and the truncated result
2170    of an unsigned division by OP1 with remainder MOD. Halfway cases
2171 are rounded away from zero, rather than to the nearest even
2172 number. */
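/* Worked example: 7u / 2u truncates to 3 with remainder 1; since
   mod >= op1 - mod (1 >= 1), the adjustment is 1 and 3.5 rounds up to 4.
   For 7u / 3u the remainder 1 is less than 3 - 1 == 2, so no adjustment.  */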
2173 static rtx
2174 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2176 /* (mod >= op1 - mod ? 1 : 0) */
2177 return gen_rtx_IF_THEN_ELSE
2178 (mode, gen_rtx_GE (BImode, mod,
2179 gen_rtx_MINUS (mode, op1, mod)),
2180 const1_rtx, const0_rtx);
2183 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2184 any rtl. */
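/* For example, on a (hypothetical) target where Pmode is DImode, ptr_mode
   is SImode and POINTERS_EXTEND_UNSIGNED is 1, widening an SImode X simply
   wraps it as (zero_extend:DI X), while narrowing takes the lowpart subreg;
   no insns are emitted either way, which is what debug expressions need.  */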
2186 static rtx
2187 convert_debug_memory_address (enum machine_mode mode, rtx x)
2189 enum machine_mode xmode = GET_MODE (x);
2191 #ifndef POINTERS_EXTEND_UNSIGNED
2192 gcc_assert (mode == Pmode);
2193 gcc_assert (xmode == mode || xmode == VOIDmode);
2194 #else
2195 gcc_assert (mode == Pmode || mode == ptr_mode);
2197 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2198 return x;
2200 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2201 x = simplify_gen_subreg (mode, x, xmode,
2202 subreg_lowpart_offset
2203 (mode, xmode));
2204 else if (POINTERS_EXTEND_UNSIGNED > 0)
2205 x = gen_rtx_ZERO_EXTEND (mode, x);
2206 else if (!POINTERS_EXTEND_UNSIGNED)
2207 x = gen_rtx_SIGN_EXTEND (mode, x);
2208 else
2209 gcc_unreachable ();
2210 #endif /* POINTERS_EXTEND_UNSIGNED */
2212 return x;
2215 /* Return an RTX equivalent to the value of the tree expression
2216 EXP. */
2218 static rtx
2219 expand_debug_expr (tree exp)
2221 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2222 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2223 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2224 addr_space_t as;
2225 enum machine_mode address_mode;
2227 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2229 case tcc_expression:
2230 switch (TREE_CODE (exp))
2232 case COND_EXPR:
2233 case DOT_PROD_EXPR:
2234 goto ternary;
2236 case TRUTH_ANDIF_EXPR:
2237 case TRUTH_ORIF_EXPR:
2238 case TRUTH_AND_EXPR:
2239 case TRUTH_OR_EXPR:
2240 case TRUTH_XOR_EXPR:
2241 goto binary;
2243 case TRUTH_NOT_EXPR:
2244 goto unary;
2246 default:
2247 break;
2249 break;
2251 ternary:
2252 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2253 if (!op2)
2254 return NULL_RTX;
2255 /* Fall through. */
2257 binary:
2258 case tcc_binary:
2259 case tcc_comparison:
2260 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2261 if (!op1)
2262 return NULL_RTX;
2263 /* Fall through. */
2265 unary:
2266 case tcc_unary:
2267 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2268 if (!op0)
2269 return NULL_RTX;
2270 break;
2272 case tcc_type:
2273 case tcc_statement:
2274 gcc_unreachable ();
2276 case tcc_constant:
2277 case tcc_exceptional:
2278 case tcc_declaration:
2279 case tcc_reference:
2280 case tcc_vl_exp:
2281 break;
2284 switch (TREE_CODE (exp))
2286 case STRING_CST:
2287 if (!lookup_constant_def (exp))
2289 if (strlen (TREE_STRING_POINTER (exp)) + 1
2290 != (size_t) TREE_STRING_LENGTH (exp))
2291 return NULL_RTX;
2292 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2293 op0 = gen_rtx_MEM (BLKmode, op0);
2294 set_mem_attributes (op0, exp, 0);
2295 return op0;
2297 /* Fall through... */
2299 case INTEGER_CST:
2300 case REAL_CST:
2301 case FIXED_CST:
2302 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2303 return op0;
2305 case COMPLEX_CST:
2306 gcc_assert (COMPLEX_MODE_P (mode));
2307 op0 = expand_debug_expr (TREE_REALPART (exp));
2308 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2309 return gen_rtx_CONCAT (mode, op0, op1);
2311 case DEBUG_EXPR_DECL:
2312 op0 = DECL_RTL_IF_SET (exp);
2314 if (op0)
2315 return op0;
2317 op0 = gen_rtx_DEBUG_EXPR (mode);
2318 DEBUG_EXPR_TREE_DECL (op0) = exp;
2319 SET_DECL_RTL (exp, op0);
2321 return op0;
2323 case VAR_DECL:
2324 case PARM_DECL:
2325 case FUNCTION_DECL:
2326 case LABEL_DECL:
2327 case CONST_DECL:
2328 case RESULT_DECL:
2329 op0 = DECL_RTL_IF_SET (exp);
2331 /* This decl was probably optimized away. */
2332 if (!op0)
2334 if (TREE_CODE (exp) != VAR_DECL
2335 || DECL_EXTERNAL (exp)
2336 || !TREE_STATIC (exp)
2337 || !DECL_NAME (exp)
2338 || DECL_HARD_REGISTER (exp)
2339 || mode == VOIDmode)
2340 return NULL;
2342 op0 = DECL_RTL (exp);
2343 SET_DECL_RTL (exp, NULL);
2344 if (!MEM_P (op0)
2345 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2346 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2347 return NULL;
2349 else
2350 op0 = copy_rtx (op0);
2352 if (GET_MODE (op0) == BLKmode
2353         /* If op0 is not BLKmode but mode is BLKmode, adjust_mode
2354 below would ICE. While it is likely a FE bug,
2355 try to be robust here. See PR43166. */
2356 || mode == BLKmode)
2358 gcc_assert (MEM_P (op0));
2359 op0 = adjust_address_nv (op0, mode, 0);
2360 return op0;
2363 /* Fall through. */
2365 adjust_mode:
2366 case PAREN_EXPR:
2367 case NOP_EXPR:
2368 case CONVERT_EXPR:
2370 enum machine_mode inner_mode = GET_MODE (op0);
2372 if (mode == inner_mode)
2373 return op0;
2375 if (inner_mode == VOIDmode)
2377 if (TREE_CODE (exp) == SSA_NAME)
2378 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2379 else
2380 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2381 if (mode == inner_mode)
2382 return op0;
2385 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2387 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2388 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2389 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2390 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2391 else
2392 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2394 else if (FLOAT_MODE_P (mode))
2396 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2397 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2398 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2399 else
2400 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2402 else if (FLOAT_MODE_P (inner_mode))
2404 if (unsignedp)
2405 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2406 else
2407 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2409 else if (CONSTANT_P (op0)
2410 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2411 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2412 subreg_lowpart_offset (mode,
2413 inner_mode));
2414 else if (unsignedp)
2415 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2416 else
2417 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2419 return op0;
2422 case INDIRECT_REF:
2423 case ALIGN_INDIRECT_REF:
2424 case MISALIGNED_INDIRECT_REF:
2425 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2426 if (!op0)
2427 return NULL;
2429 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2431 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2432 address_mode = targetm.addr_space.address_mode (as);
2434 else
2436 as = ADDR_SPACE_GENERIC;
2437 address_mode = Pmode;
2440 if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
2442 int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
2443 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
2446 op0 = gen_rtx_MEM (mode, op0);
2448 set_mem_attributes (op0, exp, 0);
2449 set_mem_addr_space (op0, as);
2451 return op0;
2453 case TARGET_MEM_REF:
2454 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
2455 return NULL;
2457 op0 = expand_debug_expr
2458 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2459 if (!op0)
2460 return NULL;
2462 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2464 op0 = gen_rtx_MEM (mode, op0);
2466 set_mem_attributes (op0, exp, 0);
2467 set_mem_addr_space (op0, as);
2469 return op0;
2471 case ARRAY_REF:
2472 case ARRAY_RANGE_REF:
2473 case COMPONENT_REF:
2474 case BIT_FIELD_REF:
2475 case REALPART_EXPR:
2476 case IMAGPART_EXPR:
2477 case VIEW_CONVERT_EXPR:
2479 enum machine_mode mode1;
2480 HOST_WIDE_INT bitsize, bitpos;
2481 tree offset;
2482 int volatilep = 0;
2483 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2484 &mode1, &unsignedp, &volatilep, false);
2485 rtx orig_op0;
2487 if (bitsize == 0)
2488 return NULL;
2490 orig_op0 = op0 = expand_debug_expr (tem);
2492 if (!op0)
2493 return NULL;
2495 if (offset)
2497 enum machine_mode addrmode, offmode;
2499 gcc_assert (MEM_P (op0));
2501 op0 = XEXP (op0, 0);
2502 addrmode = GET_MODE (op0);
2503 if (addrmode == VOIDmode)
2504 addrmode = Pmode;
2506 op1 = expand_debug_expr (offset);
2507 if (!op1)
2508 return NULL;
2510 offmode = GET_MODE (op1);
2511 if (offmode == VOIDmode)
2512 offmode = TYPE_MODE (TREE_TYPE (offset));
2514 if (addrmode != offmode)
2515 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2516 subreg_lowpart_offset (addrmode,
2517 offmode));
2519         /* Don't use offset_address here; we don't need a
2520 recognizable address, and we don't want to generate
2521 code. */
2522 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2525 if (MEM_P (op0))
2527 if (mode1 == VOIDmode)
2528 /* Bitfield. */
2529 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2530 if (bitpos >= BITS_PER_UNIT)
2532 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2533 bitpos %= BITS_PER_UNIT;
2535 else if (bitpos < 0)
2537 HOST_WIDE_INT units
2538 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2539 op0 = adjust_address_nv (op0, mode1, units);
2540 bitpos += units * BITS_PER_UNIT;
2542 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2543 op0 = adjust_address_nv (op0, mode, 0);
2544 else if (GET_MODE (op0) != mode1)
2545 op0 = adjust_address_nv (op0, mode1, 0);
2546 else
2547 op0 = copy_rtx (op0);
2548 if (op0 == orig_op0)
2549 op0 = shallow_copy_rtx (op0);
2550 set_mem_attributes (op0, exp, 0);
2553 if (bitpos == 0 && mode == GET_MODE (op0))
2554 return op0;
2556 if (bitpos < 0)
2557 return NULL;
2559 if ((bitpos % BITS_PER_UNIT) == 0
2560 && bitsize == GET_MODE_BITSIZE (mode1))
2562 enum machine_mode opmode = GET_MODE (op0);
2564 gcc_assert (opmode != BLKmode);
2566 if (opmode == VOIDmode)
2567 opmode = mode1;
2569 /* This condition may hold if we're expanding the address
2570 right past the end of an array that turned out not to
2571 be addressable (i.e., the address was only computed in
2572 debug stmts). The gen_subreg below would rightfully
2573 crash, and the address doesn't really exist, so just
2574 drop it. */
2575 if (bitpos >= GET_MODE_BITSIZE (opmode))
2576 return NULL;
2578 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2579 return simplify_gen_subreg (mode, op0, opmode,
2580 bitpos / BITS_PER_UNIT);
2583 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2584 && TYPE_UNSIGNED (TREE_TYPE (exp))
2585 ? SIGN_EXTRACT
2586 : ZERO_EXTRACT, mode,
2587 GET_MODE (op0) != VOIDmode
2588 ? GET_MODE (op0) : mode1,
2589 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2592 case ABS_EXPR:
2593 return gen_rtx_ABS (mode, op0);
2595 case NEGATE_EXPR:
2596 return gen_rtx_NEG (mode, op0);
2598 case BIT_NOT_EXPR:
2599 return gen_rtx_NOT (mode, op0);
2601 case FLOAT_EXPR:
2602 if (unsignedp)
2603 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2604 else
2605 return gen_rtx_FLOAT (mode, op0);
2607 case FIX_TRUNC_EXPR:
2608 if (unsignedp)
2609 return gen_rtx_UNSIGNED_FIX (mode, op0);
2610 else
2611 return gen_rtx_FIX (mode, op0);
2613 case POINTER_PLUS_EXPR:
2614 case PLUS_EXPR:
2615 return gen_rtx_PLUS (mode, op0, op1);
2617 case MINUS_EXPR:
2618 return gen_rtx_MINUS (mode, op0, op1);
2620 case MULT_EXPR:
2621 return gen_rtx_MULT (mode, op0, op1);
2623 case RDIV_EXPR:
2624 case TRUNC_DIV_EXPR:
2625 case EXACT_DIV_EXPR:
2626 if (unsignedp)
2627 return gen_rtx_UDIV (mode, op0, op1);
2628 else
2629 return gen_rtx_DIV (mode, op0, op1);
2631 case TRUNC_MOD_EXPR:
2632 if (unsignedp)
2633 return gen_rtx_UMOD (mode, op0, op1);
2634 else
2635 return gen_rtx_MOD (mode, op0, op1);
2637 case FLOOR_DIV_EXPR:
2638 if (unsignedp)
2639 return gen_rtx_UDIV (mode, op0, op1);
2640 else
2642 rtx div = gen_rtx_DIV (mode, op0, op1);
2643 rtx mod = gen_rtx_MOD (mode, op0, op1);
2644 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2645 return gen_rtx_PLUS (mode, div, adj);
2648 case FLOOR_MOD_EXPR:
2649 if (unsignedp)
2650 return gen_rtx_UMOD (mode, op0, op1);
2651 else
2653 rtx mod = gen_rtx_MOD (mode, op0, op1);
2654 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2655 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2656 return gen_rtx_PLUS (mode, mod, adj);
2659 case CEIL_DIV_EXPR:
2660 if (unsignedp)
2662 rtx div = gen_rtx_UDIV (mode, op0, op1);
2663 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2664 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2665 return gen_rtx_PLUS (mode, div, adj);
2667 else
2669 rtx div = gen_rtx_DIV (mode, op0, op1);
2670 rtx mod = gen_rtx_MOD (mode, op0, op1);
2671 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2672 return gen_rtx_PLUS (mode, div, adj);
2675 case CEIL_MOD_EXPR:
2676 if (unsignedp)
2678 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2679 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2680 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2681 return gen_rtx_PLUS (mode, mod, adj);
2683 else
2685 rtx mod = gen_rtx_MOD (mode, op0, op1);
2686 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2687 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2688 return gen_rtx_PLUS (mode, mod, adj);
2691 case ROUND_DIV_EXPR:
2692 if (unsignedp)
2694 rtx div = gen_rtx_UDIV (mode, op0, op1);
2695 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2696 rtx adj = round_udiv_adjust (mode, mod, op1);
2697 return gen_rtx_PLUS (mode, div, adj);
2699 else
2701 rtx div = gen_rtx_DIV (mode, op0, op1);
2702 rtx mod = gen_rtx_MOD (mode, op0, op1);
2703 rtx adj = round_sdiv_adjust (mode, mod, op1);
2704 return gen_rtx_PLUS (mode, div, adj);
2707 case ROUND_MOD_EXPR:
2708 if (unsignedp)
2710 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2711 rtx adj = round_udiv_adjust (mode, mod, op1);
2712 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2713 return gen_rtx_PLUS (mode, mod, adj);
2715 else
2717 rtx mod = gen_rtx_MOD (mode, op0, op1);
2718 rtx adj = round_sdiv_adjust (mode, mod, op1);
2719 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2720 return gen_rtx_PLUS (mode, mod, adj);
2723 case LSHIFT_EXPR:
2724 return gen_rtx_ASHIFT (mode, op0, op1);
2726 case RSHIFT_EXPR:
2727 if (unsignedp)
2728 return gen_rtx_LSHIFTRT (mode, op0, op1);
2729 else
2730 return gen_rtx_ASHIFTRT (mode, op0, op1);
2732 case LROTATE_EXPR:
2733 return gen_rtx_ROTATE (mode, op0, op1);
2735 case RROTATE_EXPR:
2736 return gen_rtx_ROTATERT (mode, op0, op1);
2738 case MIN_EXPR:
2739 if (unsignedp)
2740 return gen_rtx_UMIN (mode, op0, op1);
2741 else
2742 return gen_rtx_SMIN (mode, op0, op1);
2744 case MAX_EXPR:
2745 if (unsignedp)
2746 return gen_rtx_UMAX (mode, op0, op1);
2747 else
2748 return gen_rtx_SMAX (mode, op0, op1);
2750 case BIT_AND_EXPR:
2751 case TRUTH_AND_EXPR:
2752 return gen_rtx_AND (mode, op0, op1);
2754 case BIT_IOR_EXPR:
2755 case TRUTH_OR_EXPR:
2756 return gen_rtx_IOR (mode, op0, op1);
2758 case BIT_XOR_EXPR:
2759 case TRUTH_XOR_EXPR:
2760 return gen_rtx_XOR (mode, op0, op1);
2762 case TRUTH_ANDIF_EXPR:
2763 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2765 case TRUTH_ORIF_EXPR:
2766 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2768 case TRUTH_NOT_EXPR:
2769 return gen_rtx_EQ (mode, op0, const0_rtx);
2771 case LT_EXPR:
2772 if (unsignedp)
2773 return gen_rtx_LTU (mode, op0, op1);
2774 else
2775 return gen_rtx_LT (mode, op0, op1);
2777 case LE_EXPR:
2778 if (unsignedp)
2779 return gen_rtx_LEU (mode, op0, op1);
2780 else
2781 return gen_rtx_LE (mode, op0, op1);
2783 case GT_EXPR:
2784 if (unsignedp)
2785 return gen_rtx_GTU (mode, op0, op1);
2786 else
2787 return gen_rtx_GT (mode, op0, op1);
2789 case GE_EXPR:
2790 if (unsignedp)
2791 return gen_rtx_GEU (mode, op0, op1);
2792 else
2793 return gen_rtx_GE (mode, op0, op1);
2795 case EQ_EXPR:
2796 return gen_rtx_EQ (mode, op0, op1);
2798 case NE_EXPR:
2799 return gen_rtx_NE (mode, op0, op1);
2801 case UNORDERED_EXPR:
2802 return gen_rtx_UNORDERED (mode, op0, op1);
2804 case ORDERED_EXPR:
2805 return gen_rtx_ORDERED (mode, op0, op1);
2807 case UNLT_EXPR:
2808 return gen_rtx_UNLT (mode, op0, op1);
2810 case UNLE_EXPR:
2811 return gen_rtx_UNLE (mode, op0, op1);
2813 case UNGT_EXPR:
2814 return gen_rtx_UNGT (mode, op0, op1);
2816 case UNGE_EXPR:
2817 return gen_rtx_UNGE (mode, op0, op1);
2819 case UNEQ_EXPR:
2820 return gen_rtx_UNEQ (mode, op0, op1);
2822 case LTGT_EXPR:
2823 return gen_rtx_LTGT (mode, op0, op1);
2825 case COND_EXPR:
2826 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2828 case COMPLEX_EXPR:
2829 gcc_assert (COMPLEX_MODE_P (mode));
2830 if (GET_MODE (op0) == VOIDmode)
2831 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2832 if (GET_MODE (op1) == VOIDmode)
2833 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2834 return gen_rtx_CONCAT (mode, op0, op1);
2836 case CONJ_EXPR:
2837 if (GET_CODE (op0) == CONCAT)
2838 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2839 gen_rtx_NEG (GET_MODE_INNER (mode),
2840 XEXP (op0, 1)));
2841 else
2843 enum machine_mode imode = GET_MODE_INNER (mode);
2844 rtx re, im;
2846 if (MEM_P (op0))
2848 re = adjust_address_nv (op0, imode, 0);
2849 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2851 else
2853 enum machine_mode ifmode = int_mode_for_mode (mode);
2854 enum machine_mode ihmode = int_mode_for_mode (imode);
2855 rtx halfsize;
2856 if (ifmode == BLKmode || ihmode == BLKmode)
2857 return NULL;
2858 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2859 re = op0;
2860 if (mode != ifmode)
2861 re = gen_rtx_SUBREG (ifmode, re, 0);
2862 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2863 if (imode != ihmode)
2864 re = gen_rtx_SUBREG (imode, re, 0);
2865 im = copy_rtx (op0);
2866 if (mode != ifmode)
2867 im = gen_rtx_SUBREG (ifmode, im, 0);
2868 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
2869 if (imode != ihmode)
2870 im = gen_rtx_SUBREG (imode, im, 0);
2872 im = gen_rtx_NEG (imode, im);
2873 return gen_rtx_CONCAT (mode, re, im);
2876 case ADDR_EXPR:
2877 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2878 if (!op0 || !MEM_P (op0))
2879 return NULL;
2881 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
2883 return op0;
2885 case VECTOR_CST:
2886 exp = build_constructor_from_list (TREE_TYPE (exp),
2887 TREE_VECTOR_CST_ELTS (exp));
2888 /* Fall through. */
2890 case CONSTRUCTOR:
2891 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
2893 unsigned i;
2894 tree val;
2896 op0 = gen_rtx_CONCATN
2897 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
2899 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
2901 op1 = expand_debug_expr (val);
2902 if (!op1)
2903 return NULL;
2904 XVECEXP (op0, 0, i) = op1;
2907 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
2909 op1 = expand_debug_expr
2910 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
2912 if (!op1)
2913 return NULL;
2915 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
2916 XVECEXP (op0, 0, i) = op1;
2919 return op0;
2921 else
2922 goto flag_unsupported;
2924 case CALL_EXPR:
2925 /* ??? Maybe handle some builtins? */
2926 return NULL;
2928 case SSA_NAME:
2930 gimple g = get_gimple_for_ssa_name (exp);
2931 if (g)
2933 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
2934 if (!op0)
2935 return NULL;
2937 else
2939 int part = var_to_partition (SA.map, exp);
2941 if (part == NO_PARTITION)
2942 return NULL;
2944 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
2946 op0 = SA.partition_to_pseudo[part];
2948 goto adjust_mode;
2951 case ERROR_MARK:
2952 return NULL;
2954     /* Vector stuff. For most of these codes we don't have corresponding rtl codes. */
2955 case REALIGN_LOAD_EXPR:
2956 case REDUC_MAX_EXPR:
2957 case REDUC_MIN_EXPR:
2958 case REDUC_PLUS_EXPR:
2959 case VEC_COND_EXPR:
2960 case VEC_EXTRACT_EVEN_EXPR:
2961 case VEC_EXTRACT_ODD_EXPR:
2962 case VEC_INTERLEAVE_HIGH_EXPR:
2963 case VEC_INTERLEAVE_LOW_EXPR:
2964 case VEC_LSHIFT_EXPR:
2965 case VEC_PACK_FIX_TRUNC_EXPR:
2966 case VEC_PACK_SAT_EXPR:
2967 case VEC_PACK_TRUNC_EXPR:
2968 case VEC_RSHIFT_EXPR:
2969 case VEC_UNPACK_FLOAT_HI_EXPR:
2970 case VEC_UNPACK_FLOAT_LO_EXPR:
2971 case VEC_UNPACK_HI_EXPR:
2972 case VEC_UNPACK_LO_EXPR:
2973 case VEC_WIDEN_MULT_HI_EXPR:
2974 case VEC_WIDEN_MULT_LO_EXPR:
2975 return NULL;
2977 /* Misc codes. */
2978 case ADDR_SPACE_CONVERT_EXPR:
2979 case FIXED_CONVERT_EXPR:
2980 case OBJ_TYPE_REF:
2981 case WITH_SIZE_EXPR:
2982 return NULL;
2984 case DOT_PROD_EXPR:
2985 if (SCALAR_INT_MODE_P (GET_MODE (op0))
2986 && SCALAR_INT_MODE_P (mode))
2988 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2989 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2990 else
2991 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2992 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
2993 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
2994 else
2995 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
2996 op0 = gen_rtx_MULT (mode, op0, op1);
2997 return gen_rtx_PLUS (mode, op0, op2);
2999 return NULL;
3001 case WIDEN_MULT_EXPR:
3002 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3003 && SCALAR_INT_MODE_P (mode))
3005 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3006 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3007 else
3008 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3009 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3010 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3011 else
3012 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3013 return gen_rtx_MULT (mode, op0, op1);
3015 return NULL;
3017 case WIDEN_SUM_EXPR:
3018 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3019 && SCALAR_INT_MODE_P (mode))
3021 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3022 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3023 else
3024 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3025 return gen_rtx_PLUS (mode, op0, op1);
3027 return NULL;
3029 default:
3030 flag_unsupported:
3031 #ifdef ENABLE_CHECKING
3032 debug_tree (exp);
3033 gcc_unreachable ();
3034 #else
3035 return NULL;
3036 #endif
3040 /* Expand the _LOCs in debug insns. We run this after expanding all
3041 regular insns, so that any variables referenced in the function
3042 will have their DECL_RTLs set. */
3044 static void
3045 expand_debug_locations (void)
3047 rtx insn;
3048 rtx last = get_last_insn ();
3049 int save_strict_alias = flag_strict_aliasing;
3051 /* New alias sets while setting up memory attributes cause
3052      -fcompare-debug failures, even though they don't bring about any
3053 codegen changes. */
3054 flag_strict_aliasing = 0;
3056 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3057 if (DEBUG_INSN_P (insn))
3059 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3060 rtx val;
3061 enum machine_mode mode;
3063 if (value == NULL_TREE)
3064 val = NULL_RTX;
3065 else
3067 val = expand_debug_expr (value);
3068 gcc_assert (last == get_last_insn ());
3071 if (!val)
3072 val = gen_rtx_UNKNOWN_VAR_LOC ();
3073 else
3075 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3077 gcc_assert (mode == GET_MODE (val)
3078 || (GET_MODE (val) == VOIDmode
3079 && (CONST_INT_P (val)
3080 || GET_CODE (val) == CONST_FIXED
3081 || GET_CODE (val) == CONST_DOUBLE
3082 || GET_CODE (val) == LABEL_REF)));
3085 INSN_VAR_LOCATION_LOC (insn) = val;
3088 flag_strict_aliasing = save_strict_alias;
3091 /* Expand basic block BB from GIMPLE trees to RTL. */
3093 static basic_block
3094 expand_gimple_basic_block (basic_block bb)
3096 gimple_stmt_iterator gsi;
3097 gimple_seq stmts;
3098 gimple stmt = NULL;
3099 rtx note, last;
3100 edge e;
3101 edge_iterator ei;
3102 void **elt;
3104 if (dump_file)
3105 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3106 bb->index);
3108 /* Note that since we are now transitioning from GIMPLE to RTL, we
3109 cannot use the gsi_*_bb() routines because they expect the basic
3110 block to be in GIMPLE, instead of RTL. Therefore, we need to
3111 access the BB sequence directly. */
3112 stmts = bb_seq (bb);
3113 bb->il.gimple = NULL;
3114 rtl_profile_for_bb (bb);
3115 init_rtl_bb_info (bb);
3116 bb->flags |= BB_RTL;
3118   /* Remove the RETURN_EXPR if we may fall through to the exit
3119 instead. */
3120 gsi = gsi_last (stmts);
3121 if (!gsi_end_p (gsi)
3122 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3124 gimple ret_stmt = gsi_stmt (gsi);
3126 gcc_assert (single_succ_p (bb));
3127 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3129 if (bb->next_bb == EXIT_BLOCK_PTR
3130 && !gimple_return_retval (ret_stmt))
3132 gsi_remove (&gsi, false);
3133 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3137 gsi = gsi_start (stmts);
3138 if (!gsi_end_p (gsi))
3140 stmt = gsi_stmt (gsi);
3141 if (gimple_code (stmt) != GIMPLE_LABEL)
3142 stmt = NULL;
3145 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3147 if (stmt || elt)
3149 last = get_last_insn ();
3151 if (stmt)
3153 expand_gimple_stmt (stmt);
3154 gsi_next (&gsi);
3157 if (elt)
3158 emit_label ((rtx) *elt);
3160       /* Java emits line number notes at the top of labels.
3161 ??? Make this go away once line number notes are obsoleted. */
3162 BB_HEAD (bb) = NEXT_INSN (last);
3163 if (NOTE_P (BB_HEAD (bb)))
3164 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3165 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3167 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3169 else
3170 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3172 NOTE_BASIC_BLOCK (note) = bb;
3174 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3176 basic_block new_bb;
3178 stmt = gsi_stmt (gsi);
3180 /* If this statement is a non-debug one, and we generate debug
3181 insns, then this one might be the last real use of a TERed
3182 SSA_NAME, but where there are still some debug uses further
3183 down. Expanding the current SSA name in such further debug
3184 uses by their RHS might lead to wrong debug info, as coalescing
3185 might make the operands of such RHS be placed into the same
3186 pseudo as something else. Like so:
3187 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3188 use(a_1);
3189 a_2 = ...
3190 #DEBUG ... => a_1
3191 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3192          If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3193 the write to a_2 would actually have clobbered the place which
3194 formerly held a_0.
3196 So, instead of that, we recognize the situation, and generate
3197 debug temporaries at the last real use of TERed SSA names:
3198 a_1 = a_0 + 1;
3199 #DEBUG #D1 => a_1
3200 use(a_1);
3201 a_2 = ...
3202 #DEBUG ... => #D1
3204 if (MAY_HAVE_DEBUG_INSNS
3205 && SA.values
3206 && !is_gimple_debug (stmt))
3208 ssa_op_iter iter;
3209 tree op;
3210 gimple def;
3212 location_t sloc = get_curr_insn_source_location ();
3213 tree sblock = get_curr_insn_block ();
3215 /* Look for SSA names that have their last use here (TERed
3216 names always have only one real use). */
3217 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3218 if ((def = get_gimple_for_ssa_name (op)))
3220 imm_use_iterator imm_iter;
3221 use_operand_p use_p;
3222 bool have_debug_uses = false;
3224 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3226 if (gimple_debug_bind_p (USE_STMT (use_p)))
3228 have_debug_uses = true;
3229 break;
3233 if (have_debug_uses)
3235                /* OP is a TERed SSA name, with DEF its defining
3236 statement, and where OP is used in further debug
3237 instructions. Generate a debug temporary, and
3238 replace all uses of OP in debug insns with that
3239 temporary. */
3240 gimple debugstmt;
3241 tree value = gimple_assign_rhs_to_tree (def);
3242 tree vexpr = make_node (DEBUG_EXPR_DECL);
3243 rtx val;
3244 enum machine_mode mode;
3246 set_curr_insn_source_location (gimple_location (def));
3247 set_curr_insn_block (gimple_block (def));
3249 DECL_ARTIFICIAL (vexpr) = 1;
3250 TREE_TYPE (vexpr) = TREE_TYPE (value);
3251 if (DECL_P (value))
3252 mode = DECL_MODE (value);
3253 else
3254 mode = TYPE_MODE (TREE_TYPE (value));
3255 DECL_MODE (vexpr) = mode;
3257 val = gen_rtx_VAR_LOCATION
3258 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3260 val = emit_debug_insn (val);
3262 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3264 if (!gimple_debug_bind_p (debugstmt))
3265 continue;
3267 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3268 SET_USE (use_p, vexpr);
3270 update_stmt (debugstmt);
3274 set_curr_insn_source_location (sloc);
3275 set_curr_insn_block (sblock);
3278 currently_expanding_gimple_stmt = stmt;
3280 /* Expand this statement, then evaluate the resulting RTL and
3281 fixup the CFG accordingly. */
3282 if (gimple_code (stmt) == GIMPLE_COND)
3284 new_bb = expand_gimple_cond (bb, stmt);
3285 if (new_bb)
3286 return new_bb;
3288 else if (gimple_debug_bind_p (stmt))
3290 location_t sloc = get_curr_insn_source_location ();
3291 tree sblock = get_curr_insn_block ();
3292 gimple_stmt_iterator nsi = gsi;
3294 for (;;)
3296 tree var = gimple_debug_bind_get_var (stmt);
3297 tree value;
3298 rtx val;
3299 enum machine_mode mode;
3301 if (gimple_debug_bind_has_value_p (stmt))
3302 value = gimple_debug_bind_get_value (stmt);
3303 else
3304 value = NULL_TREE;
3306 last = get_last_insn ();
3308 set_curr_insn_source_location (gimple_location (stmt));
3309 set_curr_insn_block (gimple_block (stmt));
3311 if (DECL_P (var))
3312 mode = DECL_MODE (var);
3313 else
3314 mode = TYPE_MODE (TREE_TYPE (var));
3316 val = gen_rtx_VAR_LOCATION
3317 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3319 val = emit_debug_insn (val);
3321 if (dump_file && (dump_flags & TDF_DETAILS))
3323 /* We can't dump the insn with a TREE where an RTX
3324 is expected. */
3325 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3326 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3327 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3330 /* In order not to generate too many debug temporaries,
3331 we delink all uses of debug statements we already expanded.
3332 Therefore debug statements between definition and real
3333 use of TERed SSA names will continue to use the SSA name,
3334 and not be replaced with debug temps. */
3335 delink_stmt_imm_use (stmt);
3337 gsi = nsi;
3338 gsi_next (&nsi);
3339 if (gsi_end_p (nsi))
3340 break;
3341 stmt = gsi_stmt (nsi);
3342 if (!gimple_debug_bind_p (stmt))
3343 break;
3346 set_curr_insn_source_location (sloc);
3347 set_curr_insn_block (sblock);
3349 else
3351 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3353 bool can_fallthru;
3354 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3355 if (new_bb)
3357 if (can_fallthru)
3358 bb = new_bb;
3359 else
3360 return new_bb;
3363 else
3365 def_operand_p def_p;
3366 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3368 if (def_p != NULL)
3370 /* Ignore this stmt if it is in the list of
3371 replaceable expressions. */
3372 if (SA.values
3373 && bitmap_bit_p (SA.values,
3374 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3375 continue;
3377 last = expand_gimple_stmt (stmt);
3378 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3383 currently_expanding_gimple_stmt = NULL;
3385 /* Expand implicit goto and convert goto_locus. */
3386 FOR_EACH_EDGE (e, ei, bb->succs)
3388 if (e->goto_locus && e->goto_block)
3390 set_curr_insn_source_location (e->goto_locus);
3391 set_curr_insn_block (e->goto_block);
3392 e->goto_locus = curr_insn_locator ();
3394 e->goto_block = NULL;
3395 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3397 emit_jump (label_rtx_for_bb (e->dest));
3398 e->flags &= ~EDGE_FALLTHRU;
3402   /* Expanded RTL can create a jump in the last instruction of the block.
3403      This might later be assumed to be a jump to the successor and break edge insertion.
3404      We need to insert a dummy move to prevent this. PR41440. */
3405 if (single_succ_p (bb)
3406 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3407 && (last = get_last_insn ())
3408 && JUMP_P (last))
3410 rtx dummy = gen_reg_rtx (SImode);
3411 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3414 do_pending_stack_adjust ();
3416 /* Find the block tail. The last insn in the block is the insn
3417 before a barrier and/or table jump insn. */
3418 last = get_last_insn ();
3419 if (BARRIER_P (last))
3420 last = PREV_INSN (last);
3421 if (JUMP_TABLE_DATA_P (last))
3422 last = PREV_INSN (PREV_INSN (last));
3423 BB_END (bb) = last;
3425 update_bb_for_insn (bb);
3427 return bb;
3431 /* Create a basic block for initialization code. */
3433 static basic_block
3434 construct_init_block (void)
3436 basic_block init_block, first_block;
3437 edge e = NULL;
3438 int flags;
3440 /* Multiple entry points not supported yet. */
3441 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3442 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3443 init_rtl_bb_info (EXIT_BLOCK_PTR);
3444 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3445 EXIT_BLOCK_PTR->flags |= BB_RTL;
3447 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3449   /* When the entry edge points to the first basic block, we don't need a jump;
3450      otherwise we have to jump to the proper target. */
3451 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3453 tree label = gimple_block_label (e->dest);
3455 emit_jump (label_rtx (label));
3456 flags = 0;
3458 else
3459 flags = EDGE_FALLTHRU;
3461 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3462 get_last_insn (),
3463 ENTRY_BLOCK_PTR);
3464 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3465 init_block->count = ENTRY_BLOCK_PTR->count;
3466 if (e)
3468 first_block = e->dest;
3469 redirect_edge_succ (e, init_block);
3470 e = make_edge (init_block, first_block, flags);
3472 else
3473 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3474 e->probability = REG_BR_PROB_BASE;
3475 e->count = ENTRY_BLOCK_PTR->count;
3477 update_bb_for_insn (init_block);
3478 return init_block;
3481 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3482 found in the block tree. */
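/* For instance, DECL_INITIAL of the function decl gets level 0, its
   BLOCK_SUBBLOCKS level 1, and so on; blocks linked by BLOCK_CHAIN are
   siblings and share a level.  */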
3484 static void
3485 set_block_levels (tree block, int level)
3487 while (block)
3489 BLOCK_NUMBER (block) = level;
3490 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3491 block = BLOCK_CHAIN (block);
3495 /* Create a block containing landing pads and similar stuff. */
3497 static void
3498 construct_exit_block (void)
3500 rtx head = get_last_insn ();
3501 rtx end;
3502 basic_block exit_block;
3503 edge e, e2;
3504 unsigned ix;
3505 edge_iterator ei;
3506 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3508 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3510 /* Make sure the locus is set to the end of the function, so that
3511 epilogue line numbers and warnings are set properly. */
3512 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3513 input_location = cfun->function_end_locus;
3515 /* The following insns belong to the top scope. */
3516 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3518 /* Generate rtl for function exit. */
3519 expand_function_end ();
3521 end = get_last_insn ();
3522 if (head == end)
3523 return;
3524   /* While emitting the function end we could move the end of the last basic block. */
3526 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3527 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3528 head = NEXT_INSN (head);
3529 exit_block = create_basic_block (NEXT_INSN (head), end,
3530 EXIT_BLOCK_PTR->prev_bb);
3531 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3532 exit_block->count = EXIT_BLOCK_PTR->count;
3534 ix = 0;
3535 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3537 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3538 if (!(e->flags & EDGE_ABNORMAL))
3539 redirect_edge_succ (e, exit_block);
3540 else
3541 ix++;
3544 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3545 e->probability = REG_BR_PROB_BASE;
3546 e->count = EXIT_BLOCK_PTR->count;
3547 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3548 if (e2 != e)
3550 e->count -= e2->count;
3551 exit_block->count -= e2->count;
3552 exit_block->frequency -= EDGE_FREQUENCY (e2);
3554 if (e->count < 0)
3555 e->count = 0;
3556 if (exit_block->count < 0)
3557 exit_block->count = 0;
3558 if (exit_block->frequency < 0)
3559 exit_block->frequency = 0;
3560 update_bb_for_insn (exit_block);
3563 /* Helper function for discover_nonconstant_array_refs.
3564 Look for ARRAY_REF nodes with non-constant indexes and mark them
3565 addressable. */
3567 static tree
3568 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3569 void *data ATTRIBUTE_UNUSED)
3571 tree t = *tp;
3573 if (IS_TYPE_OR_DECL_P (t))
3574 *walk_subtrees = 0;
3575 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3577 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3578 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3579 && (!TREE_OPERAND (t, 2)
3580 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3581 || (TREE_CODE (t) == COMPONENT_REF
3582 && (!TREE_OPERAND (t,2)
3583 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3584 || TREE_CODE (t) == BIT_FIELD_REF
3585 || TREE_CODE (t) == REALPART_EXPR
3586 || TREE_CODE (t) == IMAGPART_EXPR
3587 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3588 || CONVERT_EXPR_P (t))
3589 t = TREE_OPERAND (t, 0);
3591 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3593 t = get_base_address (t);
3594 if (t && DECL_P (t)
3595 && DECL_MODE (t) != BLKmode)
3596 TREE_ADDRESSABLE (t) = 1;
3599 *walk_subtrees = 0;
3602 return NULL_TREE;
3605 /* RTL expansion is not able to compile array references with variable
3606    offsets for arrays stored in a single register. Discover such
3607 expressions and mark variables as addressable to avoid this
3608 scenario. */
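/* Illustrative example: given a small array A that could otherwise be
   kept in a register, a reference A[i] with non-constant I has no
   compile-time offset, so the walk below sets TREE_ADDRESSABLE on A and
   thereby forces it into memory.  */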
3610 static void
3611 discover_nonconstant_array_refs (void)
3613 basic_block bb;
3614 gimple_stmt_iterator gsi;
3616 FOR_EACH_BB (bb)
3617 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3619 gimple stmt = gsi_stmt (gsi);
3620 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
3624 /* This function sets crtl->args.internal_arg_pointer to a virtual
3625 register if DRAP is needed. Local register allocator will replace
3626 virtual_incoming_args_rtx with the virtual register. */
3628 static void
3629 expand_stack_alignment (void)
3631 rtx drap_rtx;
3632 unsigned int preferred_stack_boundary;
3634 if (! SUPPORTS_STACK_ALIGNMENT)
3635 return;
3637 if (cfun->calls_alloca
3638 || cfun->has_nonlocal_label
3639 || crtl->has_nonlocal_goto)
3640 crtl->need_drap = true;
3642 /* Call update_stack_boundary here again to update incoming stack
3643 boundary. It may set incoming stack alignment to a different
3644 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3645 use the minimum incoming stack alignment to check if it is OK
3646 to perform sibcall optimization since sibcall optimization will
3647 only align the outgoing stack to incoming stack boundary. */
3648 if (targetm.calls.update_stack_boundary)
3649 targetm.calls.update_stack_boundary ();
3651 /* The incoming stack frame has to be aligned at least at
3652 parm_stack_boundary. */
3653 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3655 /* Update crtl->stack_alignment_estimated and use it later to align
3656 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3657 exceptions since callgraph doesn't collect incoming stack alignment
3658 in this case. */
3659 if (flag_non_call_exceptions
3660 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3661 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3662 else
3663 preferred_stack_boundary = crtl->preferred_stack_boundary;
3664 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3665 crtl->stack_alignment_estimated = preferred_stack_boundary;
3666 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3667 crtl->stack_alignment_needed = preferred_stack_boundary;
3669 gcc_assert (crtl->stack_alignment_needed
3670 <= crtl->stack_alignment_estimated);
3672 crtl->stack_realign_needed
3673 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3674 crtl->stack_realign_tried = crtl->stack_realign_needed;
3676 crtl->stack_realign_processed = true;
3678 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3679 alignment. */
3680 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3681 drap_rtx = targetm.calls.get_drap_rtx ();
3683 /* stack_realign_drap and drap_rtx must match. */
3684 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3686 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3687 if (NULL != drap_rtx)
3689 crtl->args.internal_arg_pointer = drap_rtx;
3691 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3692 needed. */
3693 fixup_tail_calls ();
3697 /* Translate the intermediate representation contained in the CFG
3698 from GIMPLE trees to RTL.
3700 We do conversion per basic block and preserve/update the tree CFG.
3701 This implies we have to do some magic as the CFG can simultaneously
3702 consist of basic blocks containing RTL and GIMPLE trees. This can
3703    confuse the CFG hooks, so be careful not to manipulate the CFG during
3704    the expansion. */
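/* In outline (a summary of the code below): rewrite out of SSA, expand
   the recorded variables and the function prologue, build the init block,
   expand each basic block in turn, expand debug locations, build the
   exit block, commit edge insertions, and finally let
   find_many_sub_basic_blocks rediscover the sub-block structure.  */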
3706 static unsigned int
3707 gimple_expand_cfg (void)
3709 basic_block bb, init_block;
3710 sbitmap blocks;
3711 edge_iterator ei;
3712 edge e;
3713 unsigned i;
3715 rewrite_out_of_ssa (&SA);
3716 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3717 sizeof (rtx));
3719 /* Some backends want to know that we are expanding to RTL. */
3720 currently_expanding_to_rtl = 1;
3722 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3724 insn_locators_alloc ();
3725 if (!DECL_IS_BUILTIN (current_function_decl))
3727 /* Eventually, all FEs should explicitly set function_start_locus. */
3728 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3729 set_curr_insn_source_location
3730 (DECL_SOURCE_LOCATION (current_function_decl));
3731 else
3732 set_curr_insn_source_location (cfun->function_start_locus);
3734 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3735 prologue_locator = curr_insn_locator ();
3737 /* Make sure first insn is a note even if we don't want linenums.
3738 This makes sure the first insn will never be deleted.
3739 Also, final expects a note to appear there. */
3740 emit_note (NOTE_INSN_DELETED);
3742 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3743 discover_nonconstant_array_refs ();
3745 targetm.expand_to_rtl_hook ();
3746 crtl->stack_alignment_needed = STACK_BOUNDARY;
3747 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
3748 crtl->stack_alignment_estimated = 0;
3749 crtl->preferred_stack_boundary = STACK_BOUNDARY;
3750 cfun->cfg->max_jumptable_ents = 0;
3753 /* Expand the variables recorded during gimple lowering. */
3754 expand_used_vars ();
3756 /* Honor stack protection warnings. */
3757 if (warn_stack_protect)
3759 if (cfun->calls_alloca)
3760 warning (OPT_Wstack_protector,
3761 "not protecting local variables: variable length buffer");
3762 if (has_short_buffer && !crtl->stack_protect_guard)
3763 warning (OPT_Wstack_protector,
3764 "not protecting function: no buffer at least %d bytes long",
3765 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
3768 /* Set up parameters and prepare for return, for the function. */
3769 expand_function_start (current_function_decl);
3771 /* Now that we also have the parameter RTXs, copy them over to our
3772 partitions. */
3773 for (i = 0; i < SA.map->num_partitions; i++)
3775 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
3777 if (TREE_CODE (var) != VAR_DECL
3778 && !SA.partition_to_pseudo[i])
3779 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
3780 gcc_assert (SA.partition_to_pseudo[i]);
3782 /* If this decl was marked as living in multiple places, reset
3783 this now to NULL. */
3784 if (DECL_RTL_IF_SET (var) == pc_rtx)
3785 SET_DECL_RTL (var, NULL);
3787 /* Some RTL parts really want to look at DECL_RTL(x) when x
3788 was a decl marked in REG_ATTR or MEM_ATTR. We could use
3789          SET_DECL_RTL here to make this available, but that would mean
3790          selecting one of the potentially many RTLs for one DECL. Instead
3791          of doing that we simply reset the MEM_EXPR of the RTL in question,
3792          so that nobody can get at it and hence nobody can call DECL_RTL on it. */
3793 if (!DECL_RTL_SET_P (var))
3795 if (MEM_P (SA.partition_to_pseudo[i]))
3796 set_mem_expr (SA.partition_to_pseudo[i], NULL);
3800 /* If this function is `main', emit a call to `__main'
3801 to run global initializers, etc. */
3802 if (DECL_NAME (current_function_decl)
3803 && MAIN_NAME_P (DECL_NAME (current_function_decl))
3804 && DECL_FILE_SCOPE_P (current_function_decl))
3805 expand_main_function ();
3807 /* Initialize the stack_protect_guard field. This must happen after the
3808 call to __main (if any) so that the external decl is initialized. */
3809 if (crtl->stack_protect_guard)
3810 stack_protect_prologue ();
3812 expand_phi_nodes (&SA);
3814 /* Register rtl specific functions for cfg. */
3815 rtl_register_cfg_hooks ();
3817 init_block = construct_init_block ();
3819 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
3820 remaining edges later. */
3821 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
3822 e->flags &= ~EDGE_EXECUTABLE;
3824 lab_rtx_for_bb = pointer_map_create ();
3825 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
3826 bb = expand_gimple_basic_block (bb);
3828 if (MAY_HAVE_DEBUG_INSNS)
3829 expand_debug_locations ();
3831 execute_free_datastructures ();
3832 finish_out_of_ssa (&SA);
3834 /* We are no longer in SSA form. */
3835 cfun->gimple_df->in_ssa_p = false;
3837   /* Expansion is used by optimization passes too; set maybe_hot_insn_p
3838 conservatively to true until they are all profile aware. */
3839 pointer_map_destroy (lab_rtx_for_bb);
3840 free_histograms ();
3842 construct_exit_block ();
3843 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3844 insn_locators_finalize ();
3846 /* Zap the tree EH table. */
3847 set_eh_throw_stmt_table (cfun, NULL);
3849 rebuild_jump_labels (get_insns ());
3851 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3853 edge e;
3854 edge_iterator ei;
3855 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3857 if (e->insns.r)
3858 commit_one_edge_insertion (e);
3859 else
3860 ei_next (&ei);
3864 /* We're done expanding trees to RTL. */
3865 currently_expanding_to_rtl = 0;
3867 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
3869 edge e;
3870 edge_iterator ei;
3871 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3873 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
3874 e->flags &= ~EDGE_EXECUTABLE;
3876 /* At the moment not all abnormal edges match the RTL
3877 representation. It is safe to remove them here as
3878 find_many_sub_basic_blocks will rediscover them.
3879 In the future we should get this fixed properly. */
3880 if ((e->flags & EDGE_ABNORMAL)
3881 && !(e->flags & EDGE_SIBCALL))
3882 remove_edge (e);
3883 else
3884 ei_next (&ei);
3888 blocks = sbitmap_alloc (last_basic_block);
3889 sbitmap_ones (blocks);
3890 find_many_sub_basic_blocks (blocks);
3891 sbitmap_free (blocks);
3892 purge_all_dead_edges ();
3894 compact_blocks ();
3896 expand_stack_alignment ();
3898 #ifdef ENABLE_CHECKING
3899 verify_flow_info ();
3900 #endif
3902 /* There's no need to defer outputting this function any more; we
3903 know we want to output it. */
3904 DECL_DEFER_OUTPUT (current_function_decl) = 0;
3906 /* Now that we're done expanding trees to RTL, we shouldn't have any
3907 more CONCATs anywhere. */
3908 generating_concat_p = 0;
3910 if (dump_file)
3912 fprintf (dump_file,
3913 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
3914 /* And the pass manager will dump RTL for us. */
3917 /* If we're emitting a nested function, make sure its parent gets
3918 emitted as well. Doing otherwise confuses debug info. */
3920 tree parent;
3921 for (parent = DECL_CONTEXT (current_function_decl);
3922 parent != NULL_TREE;
3923 parent = get_containing_scope (parent))
3924 if (TREE_CODE (parent) == FUNCTION_DECL)
3925 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
3928 /* We are now committed to emitting code for this function. Do any
3929 preparation, such as emitting abstract debug info for the inline
3930 before it gets mangled by optimization. */
3931 if (cgraph_function_possibly_inlined_p (current_function_decl))
3932 (*debug_hooks->outlining_inline_function) (current_function_decl);
3934 TREE_ASM_WRITTEN (current_function_decl) = 1;
3936 /* After expanding, the return labels are no longer needed. */
3937 return_label = NULL;
3938 naked_return_label = NULL;
3939 /* Tag the blocks with a depth number so that change_scope can find
3940 the common parent easily. */
3941 set_block_levels (DECL_INITIAL (cfun->decl), 0);
3942 default_rtl_profile ();
3943 return 0;
3946 struct rtl_opt_pass pass_expand =
3949 RTL_PASS,
3950 "expand", /* name */
3951 NULL, /* gate */
3952 gimple_expand_cfg, /* execute */
3953 NULL, /* sub */
3954 NULL, /* next */
3955 0, /* static_pass_number */
3956 TV_EXPAND, /* tv_id */
3957 PROP_ssa | PROP_gimple_leh | PROP_cfg
3958 | PROP_gimple_lcx, /* properties_required */
3959 PROP_rtl, /* properties_provided */
3960 PROP_ssa | PROP_trees, /* properties_destroyed */
3961 TODO_verify_ssa | TODO_verify_flow
3962 | TODO_verify_stmts, /* todo_flags_start */
3963 TODO_dump_func
3964 | TODO_ggc_collect /* todo_flags_finish */