/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "cfgloop.h"
#include "regs.h" /* For reg_renumber.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "asan.h"

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
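
/* A worked example of the FRAME_GROWS_DOWNWARD arithmetic above, using
   hypothetical values: with frame_phase == 0, frame_offset == -16, and a
   request for size 12 at alignment 8, we compute -16 - 12 == -28, which
   "&= -align" rounds down to the 8-byte multiple -32.  The new object
   occupies [-32, -20) and frame_offset becomes -32.  */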

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v =
        (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  bool (*visit)(gimple, tree, void *);

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = (size_t *)
                  pointer_map_contains (decl_to_stack_part, lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward without also moving a dereference to
     it upward.  But it's conservatively correct, as a variable can never
     hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB (bb)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block);
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK (rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB (bb)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB (bb)
    BITMAP_FREE (bb->aux);
}
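
/* Viewed as a dataflow problem, the loop above iterates

       active-out (BB) = transfer (BB, union of active-out over BB's preds)

   to a fixed point in reverse post order, where the transfer function
   adds a partition at the first mention of its variable and removes it
   at its death clobber.  The final FOR_EACH_BB walk then replays each
   block once more, this time recording the actual conflicts.  */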

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
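
/* Note that, as written, the tertiary compare above sorts *smaller* byte
   alignment first, i.e. increasing rather than the decreasing order its
   comment announces; since size is compared before alignment, this only
   affects the relative order of equal-sized variables.  */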

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
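
/* For example, if partition A is the chain a -> x -> EOC and B is the
   singleton b (the gcc_assert above guarantees B has no members beyond
   its representative), the merge splices b in directly after a,
   yielding a -> b -> x -> EOC in constant time.  */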

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((flag_sanitize & SANITIZE_ADDRESS) && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J in A's partition.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
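
/* "offset & -offset" above isolates the lowest set bit of the offset,
   i.e. the largest power of two dividing it: an offset of 24 bytes
   yields 8, so the slot is known to be at least 8-byte aligned, capped
   at base_align, the alignment of the base register itself.  */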

struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed order: highest-offset pairs come first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;
};

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          if ((flag_sanitize & SANITIZE_ADDRESS) && pred)
            {
              HOST_WIDE_INT prev_offset = frame_offset;
              tree repr_decl = NULL_TREE;

              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));
              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
            }
          else
            offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}
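
/* Under -fsanitize=address the predicated branch above lays out each
   protected partition with ASAN_RED_ZONE_SIZE bytes of extra padding and
   records, roughly, the (end-of-previous-padding, end-of-object) offset
   pair plus a representative decl; expand_used_vars later hands these
   vectors to asan_emit_stack_protection so the red zones between objects
   can be poisoned.  */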

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || (flag_sanitize & SANITIZE_ADDRESS))
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
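
/* Concretely, at -O0 with neither -fstack-protector nor
   -fsanitize=address in effect, a 4-byte scalar falls under the 32-byte
   cutoff above and gets its frame slot immediately, while a
   "char buf[64]" is deferred into the stack_vars machinery, where it may
   later share a slot with a non-conflicting variable.  */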

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that the in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at contained block levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
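
/* For instance, with the default --param ssp-buffer-size=8, a
   "char buf[4]" classifies as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY,
   a "char buf[64]" as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, and a
   struct containing the latter gets SPCT_HAS_AGGREGATE or'd in as
   well.  */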

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = pointer_map_create ();

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}

/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}

/* Expand all variables used in the function.  */

static rtx
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  vec<tree> maybe_local_decls = vNULL;
  rtx var_end_seq = NULL_RTX;
  struct pointer_map_t *ssa_name_decls;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
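
  /* For example, with hypothetical target values STARTING_FRAME_OFFSET == 4
     and a 16-byte preferred boundary, off is 4 and frame_phase becomes 12,
     making (frame_offset + frame_phase) a multiple of 16 exactly as the
     comment at frame_phase's definition requires.  */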

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  ssa_name_decls = pointer_map_create ();
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      /* Assign decls to each SSA name partition, share decls for partitions
         we could have coalesced (those with the same type).  */
      if (SSA_NAME_VAR (var) == NULL_TREE)
        {
          void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
          if (!*slot)
            *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
          replace_ssa_name_symbol (var, (tree) *slot);
        }

      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }
  pointer_map_destroy (ssa_name_decls);

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    FOR_EACH_LOCAL_DECL (cfun, i, var)
      if (!is_global_var (var))
        {
          tree var_type = TREE_TYPE (var);
          /* Examine local referenced variables that have their addresses
             taken, contain an array, or are arrays.  */
          if (TREE_CODE (var) == VAR_DECL
              && (TREE_CODE (var_type) == ARRAY_TYPE
                  || TREE_ADDRESSABLE (var)
                  || (RECORD_OR_UNION_TYPE_P (var_type)
                      && record_or_union_type_has_array_p (var_type))))
            {
              gen_stack_protect_signal = true;
              break;
            }
        }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            maybe_local_decls.safe_push (var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
          || cfun->calls_alloca || has_protected_decls)
        create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls)
        create_stack_guard ();
      break;

    default:
      ;
    }

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      struct stack_vars_data data;

      data.asan_vec = vNULL;
      data.asan_decl_vec = vNULL;

      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1, &data);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2, &data);
        }

      if (flag_sanitize & SANITIZE_ADDRESS)
        /* Phase 3, any partitions that need asan protection
           in addition to phase 1 and 2.  */
        expand_stack_vars (asan_decl_phase_3, &data);

      if (!data.asan_vec.is_empty ())
        {
          HOST_WIDE_INT prev_offset = frame_offset;
          HOST_WIDE_INT offset
            = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
                                       ASAN_RED_ZONE_SIZE);
          data.asan_vec.safe_push (prev_offset);
          data.asan_vec.safe_push (offset);

          var_end_seq
            = asan_emit_stack_protection (virtual_stack_vars_rtx,
                                          data.asan_vec.address (),
                                          data.asan_decl_vec.address (),
                                          data.asan_vec.length ());
        }

      expand_stack_vars (NULL, &data);

      data.asan_vec.release ();
      data.asan_decl_vec.release ();
    }

  fini_vars_expansion ();

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  maybe_local_decls.release ();

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }

  return var_end_seq;
}

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}
1924 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1925 Returns a new basic block if we've terminated the current basic
1926 block and created a new one. */
1928 static basic_block
1929 expand_gimple_cond (basic_block bb, gimple stmt)
1931 basic_block new_bb, dest;
1932 edge new_edge;
1933 edge true_edge;
1934 edge false_edge;
1935 rtx last2, last;
1936 enum tree_code code;
1937 tree op0, op1;
1939 code = gimple_cond_code (stmt);
1940 op0 = gimple_cond_lhs (stmt);
1941 op1 = gimple_cond_rhs (stmt);
1942 /* We're sometimes presented with such code:
1943 D.123_1 = x < y;
1944 if (D.123_1 != 0)
1946 This would expand to two comparisons which then later might
1947 be cleaned up by combine. But some pattern matchers like if-conversion
1948 work better when there's only one compare, so make up for this
1949 here as a special exception if TER would have made the same change. */
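/* Illustrative sketch (names made up): for C source such as

       int t = x < y;
       if (t) ...

   the check below recognizes the TERable definition of the boolean
   and expands a single x < y comparison instead of materializing
   the boolean first.  */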
1950 if (SA.values
1951 && TREE_CODE (op0) == SSA_NAME
1952 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
1953 && TREE_CODE (op1) == INTEGER_CST
1954 && ((gimple_cond_code (stmt) == NE_EXPR
1955 && integer_zerop (op1))
1956 || (gimple_cond_code (stmt) == EQ_EXPR
1957 && integer_onep (op1)))
1958 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1960 gimple second = SSA_NAME_DEF_STMT (op0);
1961 if (gimple_code (second) == GIMPLE_ASSIGN)
1963 enum tree_code code2 = gimple_assign_rhs_code (second);
1964 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1966 code = code2;
1967 op0 = gimple_assign_rhs1 (second);
1968 op1 = gimple_assign_rhs2 (second);
1970 /* If jumps are cheap, turn some more codes into
1971 jumpy sequences. */
1972 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1974 if ((code2 == BIT_AND_EXPR
1975 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1976 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1977 || code2 == TRUTH_AND_EXPR)
1979 code = TRUTH_ANDIF_EXPR;
1980 op0 = gimple_assign_rhs1 (second);
1981 op1 = gimple_assign_rhs2 (second);
1983 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1985 code = TRUTH_ORIF_EXPR;
1986 op0 = gimple_assign_rhs1 (second);
1987 op1 = gimple_assign_rhs2 (second);
1993 last2 = last = get_last_insn ();
1995 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1996 set_curr_insn_location (gimple_location (stmt));
1998 /* These flags have no purpose in RTL land. */
1999 true_edge->flags &= ~EDGE_TRUE_VALUE;
2000 false_edge->flags &= ~EDGE_FALSE_VALUE;
2002 /* We can either have a pure conditional jump with one fallthru edge or
2003 a two-way jump that needs to be decomposed into two basic blocks. */
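/* Sketch of the two shapes handled below (labels illustrative):

       if (cond) goto L_true;           if (cond) goto L_true;
       ;; fallthru to the false dest    goto L_false;

   The first two cases emit one jump and let the other edge fall
   through; the final case emits both jumps and splits off a new
   basic block for the unconditional one.  */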
2004 if (false_edge->dest == bb->next_bb)
2006 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2007 true_edge->probability);
2008 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2009 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2010 set_curr_insn_location (true_edge->goto_locus);
2011 false_edge->flags |= EDGE_FALLTHRU;
2012 maybe_cleanup_end_of_block (false_edge, last);
2013 return NULL;
2015 if (true_edge->dest == bb->next_bb)
2017 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2018 false_edge->probability);
2019 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2020 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2021 set_curr_insn_location (false_edge->goto_locus);
2022 true_edge->flags |= EDGE_FALLTHRU;
2023 maybe_cleanup_end_of_block (true_edge, last);
2024 return NULL;
2027 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2028 true_edge->probability);
2029 last = get_last_insn ();
2030 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2031 set_curr_insn_location (false_edge->goto_locus);
2032 emit_jump (label_rtx_for_bb (false_edge->dest));
2034 BB_END (bb) = last;
2035 if (BARRIER_P (BB_END (bb)))
2036 BB_END (bb) = PREV_INSN (BB_END (bb));
2037 update_bb_for_insn (bb);
2039 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2040 dest = false_edge->dest;
2041 redirect_edge_succ (false_edge, new_bb);
2042 false_edge->flags |= EDGE_FALLTHRU;
2043 new_bb->count = false_edge->count;
2044 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2045 if (current_loops && bb->loop_father)
2046 add_bb_to_loop (new_bb, bb->loop_father);
2047 new_edge = make_edge (new_bb, dest, 0);
2048 new_edge->probability = REG_BR_PROB_BASE;
2049 new_edge->count = new_bb->count;
2050 if (BARRIER_P (BB_END (new_bb)))
2051 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2052 update_bb_for_insn (new_bb);
2054 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2056 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2058 set_curr_insn_location (true_edge->goto_locus);
2059 true_edge->goto_locus = curr_insn_location ();
2062 return new_bb;
2065 /* Mark all calls that can have a transaction restart. */
2067 static void
2068 mark_transaction_restart_calls (gimple stmt)
2070 struct tm_restart_node dummy;
2071 void **slot;
2073 if (!cfun->gimple_df->tm_restart)
2074 return;
2076 dummy.stmt = stmt;
2077 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2078 if (slot)
2080 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2081 tree list = n->label_or_list;
2082 rtx insn;
2084 for (insn = next_real_insn (get_last_insn ());
2085 !CALL_P (insn);
2086 insn = next_real_insn (insn))
2087 continue;
2089 if (TREE_CODE (list) == LABEL_DECL)
2090 add_reg_note (insn, REG_TM, label_rtx (list));
2091 else
2092 for (; list ; list = TREE_CHAIN (list))
2093 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2097 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2098 statement STMT. */
2100 static void
2101 expand_call_stmt (gimple stmt)
2103 tree exp, decl, lhs;
2104 bool builtin_p;
2105 size_t i;
2107 if (gimple_call_internal_p (stmt))
2109 expand_internal_call (stmt);
2110 return;
2113 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2115 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2116 decl = gimple_call_fndecl (stmt);
2117 builtin_p = decl && DECL_BUILT_IN (decl);
2119 /* If this is not a builtin function, the function type through which the
2120 call is made may be different from the type of the function. */
2121 if (!builtin_p)
2122 CALL_EXPR_FN (exp)
2123 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2124 CALL_EXPR_FN (exp));
2126 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2127 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2129 for (i = 0; i < gimple_call_num_args (stmt); i++)
2131 tree arg = gimple_call_arg (stmt, i);
2132 gimple def;
2133 /* TER substitutes addresses into the arguments of builtin functions
2134 so we have a chance to infer more correct alignment information. See PR39954. */
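/* For example (hypothetical): in  memcpy (&s.m, p, n)  the SSA name
   holding &s.m is substituted back here, so the builtin expander can
   see the underlying decl and derive a better alignment than the
   default for an anonymous pointer.  */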
2135 if (builtin_p
2136 && TREE_CODE (arg) == SSA_NAME
2137 && (def = get_gimple_for_ssa_name (arg))
2138 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2139 arg = gimple_assign_rhs1 (def);
2140 CALL_EXPR_ARG (exp, i) = arg;
2143 if (gimple_has_side_effects (stmt))
2144 TREE_SIDE_EFFECTS (exp) = 1;
2146 if (gimple_call_nothrow_p (stmt))
2147 TREE_NOTHROW (exp) = 1;
2149 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2150 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2151 if (decl
2152 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2153 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2154 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2155 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2156 else
2157 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2158 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2159 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2161 /* Ensure RTL is created for debug args. */
2162 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2164 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2165 unsigned int ix;
2166 tree dtemp;
2168 if (debug_args)
2169 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2171 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2172 expand_debug_expr (dtemp);
2176 lhs = gimple_call_lhs (stmt);
2177 if (lhs)
2178 expand_assignment (lhs, exp, false);
2179 else
2180 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2182 mark_transaction_restart_calls (stmt);
2185 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2186 STMT that doesn't require special handling for outgoing edges. That
2187 is, no tail calls and no GIMPLE_COND. */
2189 static void
2190 expand_gimple_stmt_1 (gimple stmt)
2192 tree op0;
2194 set_curr_insn_location (gimple_location (stmt));
2196 switch (gimple_code (stmt))
2198 case GIMPLE_GOTO:
2199 op0 = gimple_goto_dest (stmt);
2200 if (TREE_CODE (op0) == LABEL_DECL)
2201 expand_goto (op0);
2202 else
2203 expand_computed_goto (op0);
2204 break;
2205 case GIMPLE_LABEL:
2206 expand_label (gimple_label_label (stmt));
2207 break;
2208 case GIMPLE_NOP:
2209 case GIMPLE_PREDICT:
2210 break;
2211 case GIMPLE_SWITCH:
2212 expand_case (stmt);
2213 break;
2214 case GIMPLE_ASM:
2215 expand_asm_stmt (stmt);
2216 break;
2217 case GIMPLE_CALL:
2218 expand_call_stmt (stmt);
2219 break;
2221 case GIMPLE_RETURN:
2222 op0 = gimple_return_retval (stmt);
2224 if (op0 && op0 != error_mark_node)
2226 tree result = DECL_RESULT (current_function_decl);
2228 /* If we are not returning the current function's RESULT_DECL,
2229 build an assignment to it. */
2230 if (op0 != result)
2232 /* I believe that a function's RESULT_DECL is unique. */
2233 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2235 /* ??? We'd like to use simply expand_assignment here,
2236 but this fails if the value is of BLKmode but the return
2237 decl is a register. expand_return has special handling
2238 for this combination, which eventually should move
2239 to common code. See comments there. Until then, let's
2240 build a modify expression :-/ */
2241 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2242 result, op0);
2245 if (!op0)
2246 expand_null_return ();
2247 else
2248 expand_return (op0);
2249 break;
2251 case GIMPLE_ASSIGN:
2253 tree lhs = gimple_assign_lhs (stmt);
2255 /* Tree expand used to fiddle with |= and &= of two bitfield
2256 COMPONENT_REFs here. This can't happen with gimple, the LHS
2257 of binary assigns must be a gimple reg. */
2259 if (TREE_CODE (lhs) != SSA_NAME
2260 || get_gimple_rhs_class (gimple_expr_code (stmt))
2261 == GIMPLE_SINGLE_RHS)
2263 tree rhs = gimple_assign_rhs1 (stmt);
2264 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2265 == GIMPLE_SINGLE_RHS);
2266 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2267 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2268 if (TREE_CLOBBER_P (rhs))
2269 /* This is a clobber to mark the going out of scope for
2270 this LHS. */
2272 else
2273 expand_assignment (lhs, rhs,
2274 gimple_assign_nontemporal_move_p (stmt));
2276 else
2278 rtx target, temp;
2279 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2280 struct separate_ops ops;
2281 bool promoted = false;
2283 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2284 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2285 promoted = true;
2287 ops.code = gimple_assign_rhs_code (stmt);
2288 ops.type = TREE_TYPE (lhs);
2289 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2291 case GIMPLE_TERNARY_RHS:
2292 ops.op2 = gimple_assign_rhs3 (stmt);
2293 /* Fallthru */
2294 case GIMPLE_BINARY_RHS:
2295 ops.op1 = gimple_assign_rhs2 (stmt);
2296 /* Fallthru */
2297 case GIMPLE_UNARY_RHS:
2298 ops.op0 = gimple_assign_rhs1 (stmt);
2299 break;
2300 default:
2301 gcc_unreachable ();
2303 ops.location = gimple_location (stmt);
2305 /* If we want to use a nontemporal store, force the value to
2306 register first. If we store into a promoted register,
2307 don't directly expand to target. */
2308 temp = nontemporal || promoted ? NULL_RTX : target;
2309 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2310 EXPAND_NORMAL);
2312 if (temp == target)
2314 else if (promoted)
2316 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2317 /* If TEMP is a VOIDmode constant, use convert_modes to make
2318 sure that we properly convert it. */
2319 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2321 temp = convert_modes (GET_MODE (target),
2322 TYPE_MODE (ops.type),
2323 temp, unsignedp);
2324 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2325 GET_MODE (target), temp, unsignedp);
2328 convert_move (SUBREG_REG (target), temp, unsignedp);
2330 else if (nontemporal && emit_storent_insn (target, temp))
2332 else
2334 temp = force_operand (temp, target);
2335 if (temp != target)
2336 emit_move_insn (target, temp);
2340 break;
2342 default:
2343 gcc_unreachable ();
2347 /* Expand one gimple statement STMT and return the last RTL instruction
2348 before any of the newly generated ones.
2350 In addition to generating the necessary RTL instructions this also
2351 sets REG_EH_REGION notes if necessary and sets the current source
2352 location for diagnostics. */
2354 static rtx
2355 expand_gimple_stmt (gimple stmt)
2357 location_t saved_location = input_location;
2358 rtx last = get_last_insn ();
2359 int lp_nr;
2361 gcc_assert (cfun);
2363 /* We need to save and restore the current source location so that errors
2364 discovered during expansion are emitted with the right location. But
2365 it would be better if the diagnostic routines used the source location
2366 embedded in the tree nodes rather than globals. */
2367 if (gimple_has_location (stmt))
2368 input_location = gimple_location (stmt);
2370 expand_gimple_stmt_1 (stmt);
2372 /* Free any temporaries used to evaluate this statement. */
2373 free_temp_slots ();
2375 input_location = saved_location;
2377 /* Mark all insns that may trap. */
2378 lp_nr = lookup_stmt_eh_lp (stmt);
2379 if (lp_nr)
2381 rtx insn;
2382 for (insn = next_real_insn (last); insn;
2383 insn = next_real_insn (insn))
2385 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2386 /* If we want exceptions for non-call insns, any
2387 may_trap_p instruction may throw. */
2388 && GET_CODE (PATTERN (insn)) != CLOBBER
2389 && GET_CODE (PATTERN (insn)) != USE
2390 && insn_could_throw_p (insn))
2391 make_reg_eh_region_note (insn, 0, lp_nr);
2395 return last;
2398 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2399 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2400 generated a tail call (something that might be denied by the ABI
2401 rules governing the call; see calls.c).
2403 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2404 can still reach the rest of BB. The case here is __builtin_sqrt,
2405 where the NaN result goes through the external function (with a
2406 tailcall) and the normal result happens via a sqrt instruction. */
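/* Illustrative example (assuming -fmath-errno and a target sqrt
   instruction): for

       double f (double x) { return __builtin_sqrt (x); }

   the expander emits the sqrt instruction for the common case and a
   conditional sibcall to the library sqrt for the errno/NaN path, so
   control can still fall through past the call.  */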
2408 static basic_block
2409 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2411 rtx last2, last;
2412 edge e;
2413 edge_iterator ei;
2414 int probability;
2415 gcov_type count;
2417 last2 = last = expand_gimple_stmt (stmt);
2419 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2420 if (CALL_P (last) && SIBLING_CALL_P (last))
2421 goto found;
2423 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2425 *can_fallthru = true;
2426 return NULL;
2428 found:
2429 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2430 Any instructions emitted here are about to be deleted. */
2431 do_pending_stack_adjust ();
2433 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2434 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2435 EH or abnormal edges, we shouldn't have created a tail call in
2436 the first place. So it seems to me we should just be removing
2437 all edges here, or redirecting the existing fallthru edge to
2438 the exit block. */
2440 probability = 0;
2441 count = 0;
2443 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2445 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2447 if (e->dest != EXIT_BLOCK_PTR)
2449 e->dest->count -= e->count;
2450 e->dest->frequency -= EDGE_FREQUENCY (e);
2451 if (e->dest->count < 0)
2452 e->dest->count = 0;
2453 if (e->dest->frequency < 0)
2454 e->dest->frequency = 0;
2456 count += e->count;
2457 probability += e->probability;
2458 remove_edge (e);
2460 else
2461 ei_next (&ei);
2464 /* This is somewhat ugly: the call_expr expander often emits instructions
2465 after the sibcall (to perform the function return). These confuse the
2466 find_many_sub_basic_blocks code, so we need to get rid of these. */
2467 last = NEXT_INSN (last);
2468 gcc_assert (BARRIER_P (last));
2470 *can_fallthru = false;
2471 while (NEXT_INSN (last))
2473 /* For instance, a sqrt builtin expander expands an if with a
2474 sibcall in the then-arm and a label for the else-arm. */
2475 if (LABEL_P (NEXT_INSN (last)))
2477 *can_fallthru = true;
2478 break;
2480 delete_insn (NEXT_INSN (last));
2483 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2484 e->probability += probability;
2485 e->count += count;
2486 BB_END (bb) = last;
2487 update_bb_for_insn (bb);
2489 if (NEXT_INSN (last))
2491 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2493 last = BB_END (bb);
2494 if (BARRIER_P (last))
2495 BB_END (bb) = PREV_INSN (last);
2498 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2500 return bb;
2503 /* Return the difference between the floor and the truncated result of
2504 a signed division by OP1 with remainder MOD. */
2505 static rtx
2506 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2508 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2509 return gen_rtx_IF_THEN_ELSE
2510 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2511 gen_rtx_IF_THEN_ELSE
2512 (mode, gen_rtx_LT (BImode,
2513 gen_rtx_DIV (mode, op1, mod),
2514 const0_rtx),
2515 constm1_rtx, const0_rtx),
2516 const0_rtx);
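/* Worked example (illustrative): -7 / 2 truncates to -3 with
   remainder -1; mod != 0 and op1 / mod = 2 / -1 < 0, so the
   adjustment is -1, giving the floor result -3 + -1 = -4.  */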
2519 /* Return the difference between the ceil and the truncated result of
2520 a signed division by OP1 with remainder MOD. */
2521 static rtx
2522 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2524 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2525 return gen_rtx_IF_THEN_ELSE
2526 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2527 gen_rtx_IF_THEN_ELSE
2528 (mode, gen_rtx_GT (BImode,
2529 gen_rtx_DIV (mode, op1, mod),
2530 const0_rtx),
2531 const1_rtx, const0_rtx),
2532 const0_rtx);
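/* Worked example (illustrative): 7 / 2 truncates to 3 with
   remainder 1; mod != 0 and op1 / mod = 2 / 1 > 0, so the
   adjustment is 1, giving the ceiling result 3 + 1 = 4.  */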
2535 /* Return the difference between the ceil and the truncated result of
2536 an unsigned division by OP1 with remainder MOD. */
2537 static rtx
2538 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2540 /* (mod != 0 ? 1 : 0) */
2541 return gen_rtx_IF_THEN_ELSE
2542 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2543 const1_rtx, const0_rtx);
2546 /* Return the difference between the rounded and the truncated result
2547 of a signed division by OP1 with remainder MOD. Halfway cases are
2548 rounded away from zero, rather than to the nearest even number. */
2549 static rtx
2550 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2552 /* (abs (mod) >= abs (op1) - abs (mod)
2553 ? (op1 / mod > 0 ? 1 : -1)
2554 : 0) */
2555 return gen_rtx_IF_THEN_ELSE
2556 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2557 gen_rtx_MINUS (mode,
2558 gen_rtx_ABS (mode, op1),
2559 gen_rtx_ABS (mode, mod))),
2560 gen_rtx_IF_THEN_ELSE
2561 (mode, gen_rtx_GT (BImode,
2562 gen_rtx_DIV (mode, op1, mod),
2563 const0_rtx),
2564 const1_rtx, constm1_rtx),
2565 const0_rtx);
2568 /* Return the difference between the rounded and the truncated result
2569 of an unsigned division by OP1 with remainder MOD. Halfway cases
2570 are rounded away from zero, rather than to the nearest even
2571 number. */
2572 static rtx
2573 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2575 /* (mod >= op1 - mod ? 1 : 0) */
2576 return gen_rtx_IF_THEN_ELSE
2577 (mode, gen_rtx_GE (BImode, mod,
2578 gen_rtx_MINUS (mode, op1, mod)),
2579 const1_rtx, const0_rtx);
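/* Worked example (illustrative): 8 / 3 truncates to 2 with
   remainder 2; mod = 2 >= op1 - mod = 1, so the adjustment is 1,
   giving the rounded result 2 + 1 = 3.  For 7 / 3 the remainder
   1 < 2, so no adjustment is made.  */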
2582 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2583 any rtl. */
2585 static rtx
2586 convert_debug_memory_address (enum machine_mode mode, rtx x,
2587 addr_space_t as)
2589 enum machine_mode xmode = GET_MODE (x);
2591 #ifndef POINTERS_EXTEND_UNSIGNED
2592 gcc_assert (mode == Pmode
2593 || mode == targetm.addr_space.address_mode (as));
2594 gcc_assert (xmode == mode || xmode == VOIDmode);
2595 #else
2596 rtx temp;
2598 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2600 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2601 return x;
2603 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2604 x = simplify_gen_subreg (mode, x, xmode,
2605 subreg_lowpart_offset
2606 (mode, xmode));
2607 else if (POINTERS_EXTEND_UNSIGNED > 0)
2608 x = gen_rtx_ZERO_EXTEND (mode, x);
2609 else if (!POINTERS_EXTEND_UNSIGNED)
2610 x = gen_rtx_SIGN_EXTEND (mode, x);
2611 else
2613 switch (GET_CODE (x))
2615 case SUBREG:
2616 if ((SUBREG_PROMOTED_VAR_P (x)
2617 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2618 || (GET_CODE (SUBREG_REG (x)) == PLUS
2619 && REG_P (XEXP (SUBREG_REG (x), 0))
2620 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2621 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2622 && GET_MODE (SUBREG_REG (x)) == mode)
2623 return SUBREG_REG (x);
2624 break;
2625 case LABEL_REF:
2626 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2627 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2628 return temp;
2629 case SYMBOL_REF:
2630 temp = shallow_copy_rtx (x);
2631 PUT_MODE (temp, mode);
2632 return temp;
2633 case CONST:
2634 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2635 if (temp)
2636 temp = gen_rtx_CONST (mode, temp);
2637 return temp;
2638 case PLUS:
2639 case MINUS:
2640 if (CONST_INT_P (XEXP (x, 1)))
2642 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2643 if (temp)
2644 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2646 break;
2647 default:
2648 break;
2650 /* Don't know how to express ptr_extend as operation in debug info. */
2651 return NULL;
2653 #endif /* POINTERS_EXTEND_UNSIGNED */
2655 return x;
2658 /* Return an RTX equivalent to the value of the parameter DECL. */
2660 static rtx
2661 expand_debug_parm_decl (tree decl)
2663 rtx incoming = DECL_INCOMING_RTL (decl);
2665 if (incoming
2666 && GET_MODE (incoming) != BLKmode
2667 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2668 || (MEM_P (incoming)
2669 && REG_P (XEXP (incoming, 0))
2670 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2672 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2674 #ifdef HAVE_window_save
2675 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2676 If the target machine has an explicit window save instruction, the
2677 actual entry value is the corresponding OUTGOING_REGNO instead. */
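/* (On SPARC, for example, a parameter the callee sees in %i0
   arrived in the caller's %o0, and it is the latter that names the
   value at function entry.)  */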
2678 if (REG_P (incoming)
2679 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2680 incoming
2681 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2682 OUTGOING_REGNO (REGNO (incoming)), 0);
2683 else if (MEM_P (incoming))
2685 rtx reg = XEXP (incoming, 0);
2686 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2688 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2689 incoming = replace_equiv_address_nv (incoming, reg);
2691 else
2692 incoming = copy_rtx (incoming);
2694 #endif
2696 ENTRY_VALUE_EXP (rtl) = incoming;
2697 return rtl;
2700 if (incoming
2701 && GET_MODE (incoming) != BLKmode
2702 && !TREE_ADDRESSABLE (decl)
2703 && MEM_P (incoming)
2704 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2705 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2706 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2707 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2708 return copy_rtx (incoming);
2710 return NULL_RTX;
2713 /* Return an RTX equivalent to the value of the tree expression EXP. */
2715 static rtx
2716 expand_debug_expr (tree exp)
2718 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2719 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2720 enum machine_mode inner_mode = VOIDmode;
2721 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2722 addr_space_t as;
2724 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2726 case tcc_expression:
2727 switch (TREE_CODE (exp))
2729 case COND_EXPR:
2730 case DOT_PROD_EXPR:
2731 case WIDEN_MULT_PLUS_EXPR:
2732 case WIDEN_MULT_MINUS_EXPR:
2733 case FMA_EXPR:
2734 goto ternary;
2736 case TRUTH_ANDIF_EXPR:
2737 case TRUTH_ORIF_EXPR:
2738 case TRUTH_AND_EXPR:
2739 case TRUTH_OR_EXPR:
2740 case TRUTH_XOR_EXPR:
2741 goto binary;
2743 case TRUTH_NOT_EXPR:
2744 goto unary;
2746 default:
2747 break;
2749 break;
2751 ternary:
2752 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2753 if (!op2)
2754 return NULL_RTX;
2755 /* Fall through. */
2757 binary:
2758 case tcc_binary:
2759 case tcc_comparison:
2760 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2761 if (!op1)
2762 return NULL_RTX;
2763 /* Fall through. */
2765 unary:
2766 case tcc_unary:
2767 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2768 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2769 if (!op0)
2770 return NULL_RTX;
2771 break;
2773 case tcc_type:
2774 case tcc_statement:
2775 gcc_unreachable ();
2777 case tcc_constant:
2778 case tcc_exceptional:
2779 case tcc_declaration:
2780 case tcc_reference:
2781 case tcc_vl_exp:
2782 break;
2785 switch (TREE_CODE (exp))
2787 case STRING_CST:
2788 if (!lookup_constant_def (exp))
2790 if (strlen (TREE_STRING_POINTER (exp)) + 1
2791 != (size_t) TREE_STRING_LENGTH (exp))
2792 return NULL_RTX;
2793 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2794 op0 = gen_rtx_MEM (BLKmode, op0);
2795 set_mem_attributes (op0, exp, 0);
2796 return op0;
2798 /* Fall through... */
2800 case INTEGER_CST:
2801 case REAL_CST:
2802 case FIXED_CST:
2803 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2804 return op0;
2806 case COMPLEX_CST:
2807 gcc_assert (COMPLEX_MODE_P (mode));
2808 op0 = expand_debug_expr (TREE_REALPART (exp));
2809 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2810 return gen_rtx_CONCAT (mode, op0, op1);
2812 case DEBUG_EXPR_DECL:
2813 op0 = DECL_RTL_IF_SET (exp);
2815 if (op0)
2816 return op0;
2818 op0 = gen_rtx_DEBUG_EXPR (mode);
2819 DEBUG_EXPR_TREE_DECL (op0) = exp;
2820 SET_DECL_RTL (exp, op0);
2822 return op0;
2824 case VAR_DECL:
2825 case PARM_DECL:
2826 case FUNCTION_DECL:
2827 case LABEL_DECL:
2828 case CONST_DECL:
2829 case RESULT_DECL:
2830 op0 = DECL_RTL_IF_SET (exp);
2832 /* This decl was probably optimized away. */
2833 if (!op0)
2835 if (TREE_CODE (exp) != VAR_DECL
2836 || DECL_EXTERNAL (exp)
2837 || !TREE_STATIC (exp)
2838 || !DECL_NAME (exp)
2839 || DECL_HARD_REGISTER (exp)
2840 || DECL_IN_CONSTANT_POOL (exp)
2841 || mode == VOIDmode)
2842 return NULL;
2844 op0 = make_decl_rtl_for_debug (exp);
2845 if (!MEM_P (op0)
2846 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2847 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2848 return NULL;
2850 else
2851 op0 = copy_rtx (op0);
2853 if (GET_MODE (op0) == BLKmode
2854 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2855 below would ICE. While it is likely a FE bug,
2856 try to be robust here. See PR43166. */
2857 || mode == BLKmode
2858 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2860 gcc_assert (MEM_P (op0));
2861 op0 = adjust_address_nv (op0, mode, 0);
2862 return op0;
2865 /* Fall through. */
2867 adjust_mode:
2868 case PAREN_EXPR:
2869 case NOP_EXPR:
2870 case CONVERT_EXPR:
2872 inner_mode = GET_MODE (op0);
2874 if (mode == inner_mode)
2875 return op0;
2877 if (inner_mode == VOIDmode)
2879 if (TREE_CODE (exp) == SSA_NAME)
2880 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2881 else
2882 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2883 if (mode == inner_mode)
2884 return op0;
2887 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2889 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2890 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2891 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2892 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2893 else
2894 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2896 else if (FLOAT_MODE_P (mode))
2898 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2899 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2900 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2901 else
2902 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2904 else if (FLOAT_MODE_P (inner_mode))
2906 if (unsignedp)
2907 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2908 else
2909 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2911 else if (CONSTANT_P (op0)
2912 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2913 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2914 subreg_lowpart_offset (mode,
2915 inner_mode));
2916 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2917 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2918 : unsignedp)
2919 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2920 else
2921 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2923 return op0;
2926 case MEM_REF:
2927 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2929 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2930 TREE_OPERAND (exp, 0),
2931 TREE_OPERAND (exp, 1));
2932 if (newexp)
2933 return expand_debug_expr (newexp);
2935 /* FALLTHROUGH */
2936 case INDIRECT_REF:
2937 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2938 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2939 if (!op0)
2940 return NULL;
2942 if (TREE_CODE (exp) == MEM_REF)
2944 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2945 || (GET_CODE (op0) == PLUS
2946 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2947 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2948 Instead just use get_inner_reference. */
2949 goto component_ref;
2951 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2952 if (!op1 || !CONST_INT_P (op1))
2953 return NULL;
2955 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
2958 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2959 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2960 else
2961 as = ADDR_SPACE_GENERIC;
2963 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2964 op0, as);
2965 if (op0 == NULL_RTX)
2966 return NULL;
2968 op0 = gen_rtx_MEM (mode, op0);
2969 set_mem_attributes (op0, exp, 0);
2970 if (TREE_CODE (exp) == MEM_REF
2971 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2972 set_mem_expr (op0, NULL_TREE);
2973 set_mem_addr_space (op0, as);
2975 return op0;
2977 case TARGET_MEM_REF:
2978 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2979 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2980 return NULL;
2982 op0 = expand_debug_expr
2983 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2984 if (!op0)
2985 return NULL;
2987 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2988 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2989 else
2990 as = ADDR_SPACE_GENERIC;
2992 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2993 op0, as);
2994 if (op0 == NULL_RTX)
2995 return NULL;
2997 op0 = gen_rtx_MEM (mode, op0);
2999 set_mem_attributes (op0, exp, 0);
3000 set_mem_addr_space (op0, as);
3002 return op0;
3004 component_ref:
3005 case ARRAY_REF:
3006 case ARRAY_RANGE_REF:
3007 case COMPONENT_REF:
3008 case BIT_FIELD_REF:
3009 case REALPART_EXPR:
3010 case IMAGPART_EXPR:
3011 case VIEW_CONVERT_EXPR:
3013 enum machine_mode mode1;
3014 HOST_WIDE_INT bitsize, bitpos;
3015 tree offset;
3016 int volatilep = 0;
3017 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3018 &mode1, &unsignedp, &volatilep, false);
3019 rtx orig_op0;
3021 if (bitsize == 0)
3022 return NULL;
3024 orig_op0 = op0 = expand_debug_expr (tem);
3026 if (!op0)
3027 return NULL;
3029 if (offset)
3031 enum machine_mode addrmode, offmode;
3033 if (!MEM_P (op0))
3034 return NULL;
3036 op0 = XEXP (op0, 0);
3037 addrmode = GET_MODE (op0);
3038 if (addrmode == VOIDmode)
3039 addrmode = Pmode;
3041 op1 = expand_debug_expr (offset);
3042 if (!op1)
3043 return NULL;
3045 offmode = GET_MODE (op1);
3046 if (offmode == VOIDmode)
3047 offmode = TYPE_MODE (TREE_TYPE (offset));
3049 if (addrmode != offmode)
3050 op1 = simplify_gen_subreg (addrmode, op1, offmode,
3051 subreg_lowpart_offset (addrmode,
3052 offmode));
3054 /* Don't use offset_address here; we don't need a
3055 recognizable address, and we don't want to generate
3056 code. */
3057 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
3058 op0, op1));
3061 if (MEM_P (op0))
3063 if (mode1 == VOIDmode)
3064 /* Bitfield. */
3065 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
3066 if (bitpos >= BITS_PER_UNIT)
3068 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
3069 bitpos %= BITS_PER_UNIT;
3071 else if (bitpos < 0)
3073 HOST_WIDE_INT units
3074 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
3075 op0 = adjust_address_nv (op0, mode1, units);
3076 bitpos += units * BITS_PER_UNIT;
3078 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
3079 op0 = adjust_address_nv (op0, mode, 0);
3080 else if (GET_MODE (op0) != mode1)
3081 op0 = adjust_address_nv (op0, mode1, 0);
3082 else
3083 op0 = copy_rtx (op0);
3084 if (op0 == orig_op0)
3085 op0 = shallow_copy_rtx (op0);
3086 set_mem_attributes (op0, exp, 0);
3089 if (bitpos == 0 && mode == GET_MODE (op0))
3090 return op0;
3092 if (bitpos < 0)
3093 return NULL;
3095 if (GET_MODE (op0) == BLKmode)
3096 return NULL;
3098 if ((bitpos % BITS_PER_UNIT) == 0
3099 && bitsize == GET_MODE_BITSIZE (mode1))
3101 enum machine_mode opmode = GET_MODE (op0);
3103 if (opmode == VOIDmode)
3104 opmode = TYPE_MODE (TREE_TYPE (tem));
3106 /* This condition may hold if we're expanding the address
3107 right past the end of an array that turned out not to
3108 be addressable (i.e., the address was only computed in
3109 debug stmts). The gen_subreg below would rightfully
3110 crash, and the address doesn't really exist, so just
3111 drop it. */
3112 if (bitpos >= GET_MODE_BITSIZE (opmode))
3113 return NULL;
3115 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3116 return simplify_gen_subreg (mode, op0, opmode,
3117 bitpos / BITS_PER_UNIT);
3120 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3121 && TYPE_UNSIGNED (TREE_TYPE (exp))
3122 ? SIGN_EXTRACT
3123 : ZERO_EXTRACT, mode,
3124 GET_MODE (op0) != VOIDmode
3125 ? GET_MODE (op0)
3126 : TYPE_MODE (TREE_TYPE (tem)),
3127 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3130 case ABS_EXPR:
3131 return simplify_gen_unary (ABS, mode, op0, mode);
3133 case NEGATE_EXPR:
3134 return simplify_gen_unary (NEG, mode, op0, mode);
3136 case BIT_NOT_EXPR:
3137 return simplify_gen_unary (NOT, mode, op0, mode);
3139 case FLOAT_EXPR:
3140 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3141 0)))
3142 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3143 inner_mode);
3145 case FIX_TRUNC_EXPR:
3146 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3147 inner_mode);
3149 case POINTER_PLUS_EXPR:
3150 /* For the rare target where pointers are not the same size as
3151 size_t, we need to check for mis-matched modes and correct
3152 the addend. */
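/* Sketch: if op0 is an SImode address and op1 a DImode offset,
   op1 is truncated to SImode before the PLUS below; a narrower
   offset is instead sign-extended, as the comment below explains.  */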
3153 if (op0 && op1
3154 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3155 && GET_MODE (op0) != GET_MODE (op1))
3157 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
3158 /* If OP0 is a partial mode, then we must truncate, even if it has
3159 the same bitsize as OP1, since GCC's representation of partial modes
3160 is opaque. */
3161 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
3162 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
3163 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3164 GET_MODE (op1));
3165 else
3166 /* We always sign-extend, regardless of the signedness of
3167 the operand, because the operand is always unsigned
3168 here even if the original C expression is signed. */
3169 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3170 GET_MODE (op1));
3172 /* Fall through. */
3173 case PLUS_EXPR:
3174 return simplify_gen_binary (PLUS, mode, op0, op1);
3176 case MINUS_EXPR:
3177 return simplify_gen_binary (MINUS, mode, op0, op1);
3179 case MULT_EXPR:
3180 return simplify_gen_binary (MULT, mode, op0, op1);
3182 case RDIV_EXPR:
3183 case TRUNC_DIV_EXPR:
3184 case EXACT_DIV_EXPR:
3185 if (unsignedp)
3186 return simplify_gen_binary (UDIV, mode, op0, op1);
3187 else
3188 return simplify_gen_binary (DIV, mode, op0, op1);
3190 case TRUNC_MOD_EXPR:
3191 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3193 case FLOOR_DIV_EXPR:
3194 if (unsignedp)
3195 return simplify_gen_binary (UDIV, mode, op0, op1);
3196 else
3198 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3199 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3200 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3201 return simplify_gen_binary (PLUS, mode, div, adj);
3204 case FLOOR_MOD_EXPR:
3205 if (unsignedp)
3206 return simplify_gen_binary (UMOD, mode, op0, op1);
3207 else
3209 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3210 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3211 adj = simplify_gen_unary (NEG, mode,
3212 simplify_gen_binary (MULT, mode, adj, op1),
3213 mode);
3214 return simplify_gen_binary (PLUS, mode, mod, adj);
3217 case CEIL_DIV_EXPR:
3218 if (unsignedp)
3220 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3221 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3222 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3223 return simplify_gen_binary (PLUS, mode, div, adj);
3225 else
3227 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3228 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3229 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3230 return simplify_gen_binary (PLUS, mode, div, adj);
3233 case CEIL_MOD_EXPR:
3234 if (unsignedp)
3236 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3237 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3238 adj = simplify_gen_unary (NEG, mode,
3239 simplify_gen_binary (MULT, mode, adj, op1),
3240 mode);
3241 return simplify_gen_binary (PLUS, mode, mod, adj);
3243 else
3245 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3246 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3247 adj = simplify_gen_unary (NEG, mode,
3248 simplify_gen_binary (MULT, mode, adj, op1),
3249 mode);
3250 return simplify_gen_binary (PLUS, mode, mod, adj);
3253 case ROUND_DIV_EXPR:
3254 if (unsignedp)
3256 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3257 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3258 rtx adj = round_udiv_adjust (mode, mod, op1);
3259 return simplify_gen_binary (PLUS, mode, div, adj);
3261 else
3263 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3264 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3265 rtx adj = round_sdiv_adjust (mode, mod, op1);
3266 return simplify_gen_binary (PLUS, mode, div, adj);
3269 case ROUND_MOD_EXPR:
3270 if (unsignedp)
3272 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3273 rtx adj = round_udiv_adjust (mode, mod, op1);
3274 adj = simplify_gen_unary (NEG, mode,
3275 simplify_gen_binary (MULT, mode, adj, op1),
3276 mode);
3277 return simplify_gen_binary (PLUS, mode, mod, adj);
3279 else
3281 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3282 rtx adj = round_sdiv_adjust (mode, mod, op1);
3283 adj = simplify_gen_unary (NEG, mode,
3284 simplify_gen_binary (MULT, mode, adj, op1),
3285 mode);
3286 return simplify_gen_binary (PLUS, mode, mod, adj);
3289 case LSHIFT_EXPR:
3290 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3292 case RSHIFT_EXPR:
3293 if (unsignedp)
3294 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3295 else
3296 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3298 case LROTATE_EXPR:
3299 return simplify_gen_binary (ROTATE, mode, op0, op1);
3301 case RROTATE_EXPR:
3302 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3304 case MIN_EXPR:
3305 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3307 case MAX_EXPR:
3308 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3310 case BIT_AND_EXPR:
3311 case TRUTH_AND_EXPR:
3312 return simplify_gen_binary (AND, mode, op0, op1);
3314 case BIT_IOR_EXPR:
3315 case TRUTH_OR_EXPR:
3316 return simplify_gen_binary (IOR, mode, op0, op1);
3318 case BIT_XOR_EXPR:
3319 case TRUTH_XOR_EXPR:
3320 return simplify_gen_binary (XOR, mode, op0, op1);
3322 case TRUTH_ANDIF_EXPR:
3323 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3325 case TRUTH_ORIF_EXPR:
3326 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3328 case TRUTH_NOT_EXPR:
3329 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3331 case LT_EXPR:
3332 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3333 op0, op1);
3335 case LE_EXPR:
3336 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3337 op0, op1);
3339 case GT_EXPR:
3340 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3341 op0, op1);
3343 case GE_EXPR:
3344 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3345 op0, op1);
3347 case EQ_EXPR:
3348 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3350 case NE_EXPR:
3351 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3353 case UNORDERED_EXPR:
3354 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3356 case ORDERED_EXPR:
3357 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3359 case UNLT_EXPR:
3360 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3362 case UNLE_EXPR:
3363 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3365 case UNGT_EXPR:
3366 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3368 case UNGE_EXPR:
3369 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3371 case UNEQ_EXPR:
3372 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3374 case LTGT_EXPR:
3375 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3377 case COND_EXPR:
3378 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3380 case COMPLEX_EXPR:
3381 gcc_assert (COMPLEX_MODE_P (mode));
3382 if (GET_MODE (op0) == VOIDmode)
3383 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3384 if (GET_MODE (op1) == VOIDmode)
3385 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3386 return gen_rtx_CONCAT (mode, op0, op1);
3388 case CONJ_EXPR:
3389 if (GET_CODE (op0) == CONCAT)
3390 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3391 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3392 XEXP (op0, 1),
3393 GET_MODE_INNER (mode)));
3394 else
3396 enum machine_mode imode = GET_MODE_INNER (mode);
3397 rtx re, im;
3399 if (MEM_P (op0))
3401 re = adjust_address_nv (op0, imode, 0);
3402 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3404 else
3406 enum machine_mode ifmode = int_mode_for_mode (mode);
3407 enum machine_mode ihmode = int_mode_for_mode (imode);
3408 rtx halfsize;
3409 if (ifmode == BLKmode || ihmode == BLKmode)
3410 return NULL;
3411 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3412 re = op0;
3413 if (mode != ifmode)
3414 re = gen_rtx_SUBREG (ifmode, re, 0);
3415 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3416 if (imode != ihmode)
3417 re = gen_rtx_SUBREG (imode, re, 0);
3418 im = copy_rtx (op0);
3419 if (mode != ifmode)
3420 im = gen_rtx_SUBREG (ifmode, im, 0);
3421 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3422 if (imode != ihmode)
3423 im = gen_rtx_SUBREG (imode, im, 0);
3425 im = gen_rtx_NEG (imode, im);
3426 return gen_rtx_CONCAT (mode, re, im);
3429 case ADDR_EXPR:
3430 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3431 if (!op0 || !MEM_P (op0))
3433 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3434 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3435 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3436 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3437 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3438 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3440 if (handled_component_p (TREE_OPERAND (exp, 0)))
3442 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3443 tree decl
3444 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3445 &bitoffset, &bitsize, &maxsize);
3446 if ((TREE_CODE (decl) == VAR_DECL
3447 || TREE_CODE (decl) == PARM_DECL
3448 || TREE_CODE (decl) == RESULT_DECL)
3449 && (!TREE_ADDRESSABLE (decl)
3450 || target_for_debug_bind (decl))
3451 && (bitoffset % BITS_PER_UNIT) == 0
3452 && bitsize > 0
3453 && bitsize == maxsize)
3455 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
3456 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
3460 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
3461 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
3462 == ADDR_EXPR)
3464 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
3465 0));
3466 if (op0 != NULL
3467 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3468 || (GET_CODE (op0) == PLUS
3469 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
3470 && CONST_INT_P (XEXP (op0, 1)))))
3472 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
3473 1));
3474 if (!op1 || !CONST_INT_P (op1))
3475 return NULL;
3477 return plus_constant (mode, op0, INTVAL (op1));
3481 return NULL;
3484 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3485 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3487 return op0;
3489 case VECTOR_CST:
3491 unsigned i;
3493 op0 = gen_rtx_CONCATN
3494 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3496 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3498 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3499 if (!op1)
3500 return NULL;
3501 XVECEXP (op0, 0, i) = op1;
3504 return op0;
3507 case CONSTRUCTOR:
3508 if (TREE_CLOBBER_P (exp))
3509 return NULL;
3510 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3512 unsigned i;
3513 tree val;
3515 op0 = gen_rtx_CONCATN
3516 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3518 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3520 op1 = expand_debug_expr (val);
3521 if (!op1)
3522 return NULL;
3523 XVECEXP (op0, 0, i) = op1;
3526 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3528 op1 = expand_debug_expr
3529 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3531 if (!op1)
3532 return NULL;
3534 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3535 XVECEXP (op0, 0, i) = op1;
3538 return op0;
3540 else
3541 goto flag_unsupported;
3543 case CALL_EXPR:
3544 /* ??? Maybe handle some builtins? */
3545 return NULL;
3547 case SSA_NAME:
3549 gimple g = get_gimple_for_ssa_name (exp);
3550 if (g)
3552 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3553 if (!op0)
3554 return NULL;
3556 else
3558 int part = var_to_partition (SA.map, exp);
3560 if (part == NO_PARTITION)
3562 /* If this is a reference to the incoming value of a parameter
3563 that is never used in the code, or whose incoming value is
3564 never used in the code, use the PARM_DECL's
3565 DECL_RTL if set. */
3566 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3567 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3569 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3570 if (op0)
3571 goto adjust_mode;
3572 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3573 if (op0)
3574 goto adjust_mode;
3576 return NULL;
3579 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3581 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3583 goto adjust_mode;
3586 case ERROR_MARK:
3587 return NULL;
3589 /* Vector stuff. For most of these codes there is no corresponding rtl code. */
3590 case REALIGN_LOAD_EXPR:
3591 case REDUC_MAX_EXPR:
3592 case REDUC_MIN_EXPR:
3593 case REDUC_PLUS_EXPR:
3594 case VEC_COND_EXPR:
3595 case VEC_LSHIFT_EXPR:
3596 case VEC_PACK_FIX_TRUNC_EXPR:
3597 case VEC_PACK_SAT_EXPR:
3598 case VEC_PACK_TRUNC_EXPR:
3599 case VEC_RSHIFT_EXPR:
3600 case VEC_UNPACK_FLOAT_HI_EXPR:
3601 case VEC_UNPACK_FLOAT_LO_EXPR:
3602 case VEC_UNPACK_HI_EXPR:
3603 case VEC_UNPACK_LO_EXPR:
3604 case VEC_WIDEN_MULT_HI_EXPR:
3605 case VEC_WIDEN_MULT_LO_EXPR:
3606 case VEC_WIDEN_MULT_EVEN_EXPR:
3607 case VEC_WIDEN_MULT_ODD_EXPR:
3608 case VEC_WIDEN_LSHIFT_HI_EXPR:
3609 case VEC_WIDEN_LSHIFT_LO_EXPR:
3610 case VEC_PERM_EXPR:
3611 return NULL;
3613 /* Misc codes. */
3614 case ADDR_SPACE_CONVERT_EXPR:
3615 case FIXED_CONVERT_EXPR:
3616 case OBJ_TYPE_REF:
3617 case WITH_SIZE_EXPR:
3618 return NULL;
3620 case DOT_PROD_EXPR:
3621 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3622 && SCALAR_INT_MODE_P (mode))
3625 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3626 0)))
3627 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3628 inner_mode);
3630 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3631 1)))
3632 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3633 inner_mode);
3634 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3635 return simplify_gen_binary (PLUS, mode, op0, op2);
3637 return NULL;
3639 case WIDEN_MULT_EXPR:
3640 case WIDEN_MULT_PLUS_EXPR:
3641 case WIDEN_MULT_MINUS_EXPR:
3642 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3643 && SCALAR_INT_MODE_P (mode))
3645 inner_mode = GET_MODE (op0);
3646 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3647 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3648 else
3649 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3650 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3651 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3652 else
3653 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3654 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3655 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3656 return op0;
3657 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3658 return simplify_gen_binary (PLUS, mode, op0, op2);
3659 else
3660 return simplify_gen_binary (MINUS, mode, op2, op0);
3662 return NULL;
3664 case MULT_HIGHPART_EXPR:
3665 /* ??? Similar to the above. */
3666 return NULL;
3668 case WIDEN_SUM_EXPR:
3669 case WIDEN_LSHIFT_EXPR:
3670 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3671 && SCALAR_INT_MODE_P (mode))
3674 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3675 0)))
3676 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3677 inner_mode);
3678 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3679 ? ASHIFT : PLUS, mode, op0, op1);
3681 return NULL;
3683 case FMA_EXPR:
3684 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3686 default:
3687 flag_unsupported:
3688 #ifdef ENABLE_CHECKING
3689 debug_tree (exp);
3690 gcc_unreachable ();
3691 #else
3692 return NULL;
3693 #endif
3697 /* Return an RTX equivalent to the source bind value of the tree expression
3698 EXP. */
3700 static rtx
3701 expand_debug_source_expr (tree exp)
3703 rtx op0 = NULL_RTX;
3704 enum machine_mode mode = VOIDmode, inner_mode;
3706 switch (TREE_CODE (exp))
3708 case PARM_DECL:
3710 mode = DECL_MODE (exp);
3711 op0 = expand_debug_parm_decl (exp);
3712 if (op0)
3713 break;
3714 /* See whether this is an argument that has been completely
3715 optimized out. */
3716 if (!DECL_RTL_SET_P (exp)
3717 && !DECL_INCOMING_RTL (exp)
3718 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3720 tree aexp = DECL_ORIGIN (exp);
3721 if (DECL_CONTEXT (aexp)
3722 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3724 vec<tree, va_gc> **debug_args;
3725 unsigned int ix;
3726 tree ddecl;
3727 debug_args = decl_debug_args_lookup (current_function_decl);
3728 if (debug_args != NULL)
3730 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
3731 ix += 2)
3732 if (ddecl == aexp)
3733 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3737 break;
3739 default:
3740 break;
3743 if (op0 == NULL_RTX)
3744 return NULL_RTX;
3746 inner_mode = GET_MODE (op0);
3747 if (mode == inner_mode)
3748 return op0;
3750 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3752 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3753 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3754 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3755 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3756 else
3757 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3759 else if (FLOAT_MODE_P (mode))
3760 gcc_unreachable ();
3761 else if (FLOAT_MODE_P (inner_mode))
3763 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3764 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3765 else
3766 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3768 else if (CONSTANT_P (op0)
3769 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3770 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3771 subreg_lowpart_offset (mode, inner_mode));
3772 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3773 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3774 else
3775 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3777 return op0;
3780 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
3781 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
3782 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
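/* Sketch: in a location nested like

       (plus (mult (plus (mult (plus (reg A) (const_int 1))
                               (const_int 4))
                         (reg B))
                   (const_int 8))
             (reg C))

   the innermost PLUS sits at depth 4; it is replaced by a fresh
   DEBUG_EXPR whose value is bound by a debug insn emitted just
   before INSN, keeping every remaining location shallow.  */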
3784 static void
3785 avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
3787 rtx exp = *exp_p;
3789 if (exp == NULL_RTX)
3790 return;
3792 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
3793 return;
3795 if (depth == 4)
3797 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
3798 rtx dval = make_debug_expr_from_rtl (exp);
3800 /* Emit a debug bind insn before INSN. */
3801 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
3802 DEBUG_EXPR_TREE_DECL (dval), exp,
3803 VAR_INIT_STATUS_INITIALIZED);
3805 emit_debug_insn_before (bind, insn);
3806 *exp_p = dval;
3807 return;
3810 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
3811 int i, j;
3812 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
3813 switch (*format_ptr++)
3815 case 'e':
3816 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
3817 break;
3819 case 'E':
3820 case 'V':
3821 for (j = 0; j < XVECLEN (exp, i); j++)
3822 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
3823 break;
3825 default:
3826 break;
3830 /* Expand the _LOCs in debug insns. We run this after expanding all
3831 regular insns, so that any variables referenced in the function
3832 will have their DECL_RTLs set. */
3834 static void
3835 expand_debug_locations (void)
3837 rtx insn;
3838 rtx last = get_last_insn ();
3839 int save_strict_alias = flag_strict_aliasing;
3841 /* New alias sets while setting up memory attributes cause
3842 -fcompare-debug failures, even though they don't bring about any
3843 codegen changes. */
3844 flag_strict_aliasing = 0;
3846 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3847 if (DEBUG_INSN_P (insn))
3849 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3850 rtx val, prev_insn, insn2;
3851 enum machine_mode mode;
3853 if (value == NULL_TREE)
3854 val = NULL_RTX;
3855 else
3857 if (INSN_VAR_LOCATION_STATUS (insn)
3858 == VAR_INIT_STATUS_UNINITIALIZED)
3859 val = expand_debug_source_expr (value);
3860 else
3861 val = expand_debug_expr (value);
3862 gcc_assert (last == get_last_insn ());
3865 if (!val)
3866 val = gen_rtx_UNKNOWN_VAR_LOC ();
3867 else
3869 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3871 gcc_assert (mode == GET_MODE (val)
3872 || (GET_MODE (val) == VOIDmode
3873 && (CONST_SCALAR_INT_P (val)
3874 || GET_CODE (val) == CONST_FIXED
3875 || GET_CODE (val) == LABEL_REF)));
3878 INSN_VAR_LOCATION_LOC (insn) = val;
3879 prev_insn = PREV_INSN (insn);
3880 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
3881 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
3884 flag_strict_aliasing = save_strict_alias;
3887 /* Expand basic block BB from GIMPLE trees to RTL. */
3889 static basic_block
3890 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
3892 gimple_stmt_iterator gsi;
3893 gimple_seq stmts;
3894 gimple stmt = NULL;
3895 rtx note, last;
3896 edge e;
3897 edge_iterator ei;
3898 void **elt;
3900 if (dump_file)
3901 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3902 bb->index);
3904 /* Note that since we are now transitioning from GIMPLE to RTL, we
3905 cannot use the gsi_*_bb() routines because they expect the basic
3906 block to be in GIMPLE, instead of RTL. Therefore, we need to
3907 access the BB sequence directly. */
3908 stmts = bb_seq (bb);
3909 bb->il.gimple.seq = NULL;
3910 bb->il.gimple.phi_nodes = NULL;
3911 rtl_profile_for_bb (bb);
3912 init_rtl_bb_info (bb);
3913 bb->flags |= BB_RTL;
3915 /* Remove the RETURN_EXPR if we may fall through to the exit
3916 instead. */
3917 gsi = gsi_last (stmts);
3918 if (!gsi_end_p (gsi)
3919 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3921 gimple ret_stmt = gsi_stmt (gsi);
3923 gcc_assert (single_succ_p (bb));
3924 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3926 if (bb->next_bb == EXIT_BLOCK_PTR
3927 && !gimple_return_retval (ret_stmt))
3929 gsi_remove (&gsi, false);
3930 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3934 gsi = gsi_start (stmts);
3935 if (!gsi_end_p (gsi))
3937 stmt = gsi_stmt (gsi);
3938 if (gimple_code (stmt) != GIMPLE_LABEL)
3939 stmt = NULL;
3942 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3944 if (stmt || elt)
3946 last = get_last_insn ();
3948 if (stmt)
3950 expand_gimple_stmt (stmt);
3951 gsi_next (&gsi);
3954 if (elt)
3955 emit_label ((rtx) *elt);
3957 /* Java emits line number notes at the top of labels.
3958 ??? Make this go away once line number notes are obsoleted. */
3959 BB_HEAD (bb) = NEXT_INSN (last);
3960 if (NOTE_P (BB_HEAD (bb)))
3961 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3962 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3964 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3966 else
3967 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3969 NOTE_BASIC_BLOCK (note) = bb;
3971 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3973 basic_block new_bb;
3975 stmt = gsi_stmt (gsi);
3977 /* If this statement is a non-debug one, and we generate debug
3978 insns, then this one might be the last real use of a TERed
3979 SSA_NAME, but where there are still some debug uses further
3980 down. Expanding the current SSA name in such further debug
3981 uses by their RHS might lead to wrong debug info, as coalescing
3982 might make the operands of such RHS be placed into the same
3983 pseudo as something else. Like so:
3984 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3985 use(a_1);
3986 a_2 = ...
3987 #DEBUG ... => a_1
3988 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3989 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3990 the write to a_2 would actually have clobbered the place which
3991 formerly held a_0.
3993 So, instead of that, we recognize the situation, and generate
3994 debug temporaries at the last real use of TERed SSA names:
3995 a_1 = a_0 + 1;
3996 #DEBUG #D1 => a_1
3997 use(a_1);
3998 a_2 = ...
3999 #DEBUG ... => #D1
4001 if (MAY_HAVE_DEBUG_INSNS
4002 && SA.values
4003 && !is_gimple_debug (stmt))
4005 ssa_op_iter iter;
4006 tree op;
4007 gimple def;
4009 location_t sloc = curr_insn_location ();
4011 /* Look for SSA names that have their last use here (TERed
4012 names always have only one real use). */
4013 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4014 if ((def = get_gimple_for_ssa_name (op)))
4016 imm_use_iterator imm_iter;
4017 use_operand_p use_p;
4018 bool have_debug_uses = false;
4020 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
4022 if (gimple_debug_bind_p (USE_STMT (use_p)))
4024 have_debug_uses = true;
4025 break;
4029 if (have_debug_uses)
4031 /* OP is a TERed SSA name, with DEF its defining
4032 statement, and OP is used in further debug
4033 instructions. Generate a debug temporary, and
4034 replace all uses of OP in debug insns with that
4035 temporary. */
4036 gimple debugstmt;
4037 tree value = gimple_assign_rhs_to_tree (def);
4038 tree vexpr = make_node (DEBUG_EXPR_DECL);
4039 rtx val;
4040 enum machine_mode mode;
4042 set_curr_insn_location (gimple_location (def));
4044 DECL_ARTIFICIAL (vexpr) = 1;
4045 TREE_TYPE (vexpr) = TREE_TYPE (value);
4046 if (DECL_P (value))
4047 mode = DECL_MODE (value);
4048 else
4049 mode = TYPE_MODE (TREE_TYPE (value));
4050 DECL_MODE (vexpr) = mode;
4052 val = gen_rtx_VAR_LOCATION
4053 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4055 emit_debug_insn (val);
4057 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
4059 if (!gimple_debug_bind_p (debugstmt))
4060 continue;
4062 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
4063 SET_USE (use_p, vexpr);
4065 update_stmt (debugstmt);
4069 set_curr_insn_location (sloc);
4072 currently_expanding_gimple_stmt = stmt;
4074 /* Expand this statement, then evaluate the resulting RTL and
4075 fixup the CFG accordingly. */
4076 if (gimple_code (stmt) == GIMPLE_COND)
4078 new_bb = expand_gimple_cond (bb, stmt);
4079 if (new_bb)
4080 return new_bb;
4082 else if (gimple_debug_bind_p (stmt))
4084 location_t sloc = curr_insn_location ();
4085 gimple_stmt_iterator nsi = gsi;
4087 for (;;)
4089 tree var = gimple_debug_bind_get_var (stmt);
4090 tree value;
4091 rtx val;
4092 enum machine_mode mode;
4094 if (TREE_CODE (var) != DEBUG_EXPR_DECL
4095 && TREE_CODE (var) != LABEL_DECL
4096 && !target_for_debug_bind (var))
4097 goto delink_debug_stmt;
4099 if (gimple_debug_bind_has_value_p (stmt))
4100 value = gimple_debug_bind_get_value (stmt);
4101 else
4102 value = NULL_TREE;
4104 last = get_last_insn ();
4106 set_curr_insn_location (gimple_location (stmt));
4108 if (DECL_P (var))
4109 mode = DECL_MODE (var);
4110 else
4111 mode = TYPE_MODE (TREE_TYPE (var));
4113 val = gen_rtx_VAR_LOCATION
4114 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4116 emit_debug_insn (val);
4118 if (dump_file && (dump_flags & TDF_DETAILS))
4120 /* We can't dump the insn with a TREE where an RTX
4121 is expected. */
4122 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4123 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4124 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4127 delink_debug_stmt:
4128 /* In order not to generate too many debug temporaries,
4129 we delink all uses of debug statements we already expanded.
4130 Therefore debug statements between definition and real
4131 use of TERed SSA names will continue to use the SSA name,
4132 and not be replaced with debug temps. */
4133 delink_stmt_imm_use (stmt);
4135 gsi = nsi;
4136 gsi_next (&nsi);
4137 if (gsi_end_p (nsi))
4138 break;
4139 stmt = gsi_stmt (nsi);
4140 if (!gimple_debug_bind_p (stmt))
4141 break;
4144 set_curr_insn_location (sloc);
4146 else if (gimple_debug_source_bind_p (stmt))
4148 location_t sloc = curr_insn_location ();
4149 tree var = gimple_debug_source_bind_get_var (stmt);
4150 tree value = gimple_debug_source_bind_get_value (stmt);
4151 rtx val;
4152 enum machine_mode mode;
4154 last = get_last_insn ();
4156 set_curr_insn_location (gimple_location (stmt));
4158 mode = DECL_MODE (var);
4160 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
4161 VAR_INIT_STATUS_UNINITIALIZED);
4163 emit_debug_insn (val);
4165 if (dump_file && (dump_flags & TDF_DETAILS))
4167 /* We can't dump the insn with a TREE where an RTX
4168 is expected. */
4169 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4170 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4171 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4174 set_curr_insn_location (sloc);
4176 else
4178 if (is_gimple_call (stmt)
4179 && gimple_call_tail_p (stmt)
4180 && disable_tail_calls)
4181 gimple_call_set_tail (stmt, false);
4183 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
4185 bool can_fallthru;
4186 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4187 if (new_bb)
4189 if (can_fallthru)
4190 bb = new_bb;
4191 else
4192 return new_bb;
4195 else
4197 def_operand_p def_p;
4198 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4200 if (def_p != NULL)
4202 /* Ignore this stmt if it is in the list of
4203 replaceable expressions. */
4204 if (SA.values
4205 && bitmap_bit_p (SA.values,
4206 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4207 continue;
4209 last = expand_gimple_stmt (stmt);
4210 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4215 currently_expanding_gimple_stmt = NULL;
4217 /* Expand implicit goto and convert goto_locus. */
4218 FOR_EACH_EDGE (e, ei, bb->succs)
4220 if (e->goto_locus != UNKNOWN_LOCATION)
4221 set_curr_insn_location (e->goto_locus);
4222 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4224 emit_jump (label_rtx_for_bb (e->dest));
4225 e->flags &= ~EDGE_FALLTHRU;
4229 /* Expanded RTL can create a jump in the last instruction of the block.
4230 This might later be assumed to be a jump to the successor and break edge insertion.
4231 We need to insert a dummy move to prevent this. PR41440. */
4232 if (single_succ_p (bb)
4233 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4234 && (last = get_last_insn ())
4235 && JUMP_P (last))
4237 rtx dummy = gen_reg_rtx (SImode);
4238 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
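/* E.g. (illustrative): if the block ends in
     (jump_insn ... (set (pc) (if_then_else ...)))
   while its only outgoing edge is a fallthru, the no-op move emitted
   above ensures the jump is no longer the last insn of the block, so
   edge insertion cannot mistake it for a jump to the successor.  */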
4241 do_pending_stack_adjust ();
4243 /* Find the block tail. The last insn in the block is the insn
4244 before a barrier and/or table jump insn. */
4245 last = get_last_insn ();
4246 if (BARRIER_P (last))
4247 last = PREV_INSN (last);
4248 if (JUMP_TABLE_DATA_P (last))
4249 last = PREV_INSN (PREV_INSN (last));
4250 BB_END (bb) = last;
4252 update_bb_for_insn (bb);
4254 return bb;
4258 /* Create a basic block for initialization code. */
4260 static basic_block
4261 construct_init_block (void)
4263 basic_block init_block, first_block;
4264 edge e = NULL;
4265 int flags;
4267 /* Multiple entry points not supported yet. */
4268 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4269 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4270 init_rtl_bb_info (EXIT_BLOCK_PTR);
4271 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4272 EXIT_BLOCK_PTR->flags |= BB_RTL;
4274 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4276 /* When the entry edge points to the first basic block, we don't need a jump;
4277 otherwise we have to jump to the proper target. */
4278 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4280 tree label = gimple_block_label (e->dest);
4282 emit_jump (label_rtx (label));
4283 flags = 0;
4285 else
4286 flags = EDGE_FALLTHRU;
4288 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4289 get_last_insn (),
4290 ENTRY_BLOCK_PTR);
4291 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4292 init_block->count = ENTRY_BLOCK_PTR->count;
4293 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4294 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
4295 if (e)
4297 first_block = e->dest;
4298 redirect_edge_succ (e, init_block);
4299 e = make_edge (init_block, first_block, flags);
4301 else
4302 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4303 e->probability = REG_BR_PROB_BASE;
4304 e->count = ENTRY_BLOCK_PTR->count;
4306 update_bb_for_insn (init_block);
4307 return init_block;
4310 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4311 found in the block tree. */
4313 static void
4314 set_block_levels (tree block, int level)
4316 while (block)
4318 BLOCK_NUMBER (block) = level;
4319 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4320 block = BLOCK_CHAIN (block);
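/* Illustration (hypothetical scope tree): given a function body
     { int a; { int b; } { int c; { int d; } } }
   calling set_block_levels on the outermost block with level 0 assigns
   BLOCK_NUMBER 0 to that block, 1 to the scopes of b and c, and 2 to
   the scope of d.  */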
4324 /* Create a block containing landing pads and similar stuff. */
4326 static void
4327 construct_exit_block (void)
4329 rtx head = get_last_insn ();
4330 rtx end;
4331 basic_block exit_block;
4332 edge e, e2;
4333 unsigned ix;
4334 edge_iterator ei;
4335 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4337 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4339 /* Make sure the locus is set to the end of the function, so that
4340 epilogue line numbers and warnings are set properly. */
4341 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
4342 input_location = cfun->function_end_locus;
4344 /* Generate rtl for function exit. */
4345 expand_function_end ();
4347 end = get_last_insn ();
4348 if (head == end)
4349 return;
4350 /* While emitting the function end we could move the end of the last basic block. */
4352 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4353 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4354 head = NEXT_INSN (head);
4355 exit_block = create_basic_block (NEXT_INSN (head), end,
4356 EXIT_BLOCK_PTR->prev_bb);
4357 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4358 exit_block->count = EXIT_BLOCK_PTR->count;
4359 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4360 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
4362 ix = 0;
4363 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4365 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4366 if (!(e->flags & EDGE_ABNORMAL))
4367 redirect_edge_succ (e, exit_block);
4368 else
4369 ix++;
4372 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4373 e->probability = REG_BR_PROB_BASE;
4374 e->count = EXIT_BLOCK_PTR->count;
4375 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4376 if (e2 != e)
4378 e->count -= e2->count;
4379 exit_block->count -= e2->count;
4380 exit_block->frequency -= EDGE_FREQUENCY (e2);
4382 if (e->count < 0)
4383 e->count = 0;
4384 if (exit_block->count < 0)
4385 exit_block->count = 0;
4386 if (exit_block->frequency < 0)
4387 exit_block->frequency = 0;
4388 update_bb_for_insn (exit_block);
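/* Worked example (illustrative numbers): suppose EXIT originally had a
   normal predecessor edge with count 90 and an abnormal one with count
   10, so EXIT_BLOCK_PTR->count == 100.  The normal edge is redirected
   to exit_block, and the loop above subtracts the remaining abnormal
   edge's count from both the new fallthru edge and exit_block, leaving
   e->count == 90 and exit_block->count == 90: only the redirected flow
   stays attributed to the epilogue block.  */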
4391 /* Helper function for discover_nonconstant_array_refs.
4392 Look for ARRAY_REF nodes with non-constant indexes and mark them
4393 addressable. */
4395 static tree
4396 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4397 void *data ATTRIBUTE_UNUSED)
4399 tree t = *tp;
4401 if (IS_TYPE_OR_DECL_P (t))
4402 *walk_subtrees = 0;
4403 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4405 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4406 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4407 && (!TREE_OPERAND (t, 2)
4408 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4409 || (TREE_CODE (t) == COMPONENT_REF
4410 && (!TREE_OPERAND (t,2)
4411 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4412 || TREE_CODE (t) == BIT_FIELD_REF
4413 || TREE_CODE (t) == REALPART_EXPR
4414 || TREE_CODE (t) == IMAGPART_EXPR
4415 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4416 || CONVERT_EXPR_P (t))
4417 t = TREE_OPERAND (t, 0);
4419 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4421 t = get_base_address (t);
4422 if (t && DECL_P (t)
4423 && DECL_MODE (t) != BLKmode)
4424 TREE_ADDRESSABLE (t) = 1;
4427 *walk_subtrees = 0;
4430 return NULL_TREE;
4433 /* RTL expansion is not able to compile array references with variable
4434 offsets for arrays stored in a single register. Discover such
4435 expressions and mark variables as addressable to avoid this
4436 scenario. */
4438 static void
4439 discover_nonconstant_array_refs (void)
4441 basic_block bb;
4442 gimple_stmt_iterator gsi;
4444 FOR_EACH_BB (bb)
4445 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4447 gimple stmt = gsi_stmt (gsi);
4448 if (!is_gimple_debug (stmt))
4449 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
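/* Source-level illustration (hypothetical input): in
     int f (int i) { int v[2] = { 1, 2 }; return v[i]; }
   the index I is not a gimple invariant, so V is marked
   TREE_ADDRESSABLE here and gets a stack slot; on targets where V
   would otherwise receive a scalar (non-BLKmode) register, RTL
   expansion could not index it with a variable offset.  */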
4453 /* This function sets crtl->args.internal_arg_pointer to a virtual
4454 register if DRAP is needed. Local register allocator will replace
4455 virtual_incoming_args_rtx with the virtual register. */
4457 static void
4458 expand_stack_alignment (void)
4460 rtx drap_rtx;
4461 unsigned int preferred_stack_boundary;
4463 if (! SUPPORTS_STACK_ALIGNMENT)
4464 return;
4466 if (cfun->calls_alloca
4467 || cfun->has_nonlocal_label
4468 || crtl->has_nonlocal_goto)
4469 crtl->need_drap = true;
4471 /* Call update_stack_boundary here again to update incoming stack
4472 boundary. It may set incoming stack alignment to a different
4473 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4474 use the minimum incoming stack alignment to check if it is OK
4475 to perform sibcall optimization since sibcall optimization will
4476 only align the outgoing stack to incoming stack boundary. */
4477 if (targetm.calls.update_stack_boundary)
4478 targetm.calls.update_stack_boundary ();
4480 /* The incoming stack frame has to be aligned at least at
4481 parm_stack_boundary. */
4482 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4484 /* Update crtl->stack_alignment_estimated and use it later to align
4485 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4486 exceptions since callgraph doesn't collect incoming stack alignment
4487 in this case. */
4488 if (cfun->can_throw_non_call_exceptions
4489 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4490 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4491 else
4492 preferred_stack_boundary = crtl->preferred_stack_boundary;
4493 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4494 crtl->stack_alignment_estimated = preferred_stack_boundary;
4495 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4496 crtl->stack_alignment_needed = preferred_stack_boundary;
4498 gcc_assert (crtl->stack_alignment_needed
4499 <= crtl->stack_alignment_estimated);
4501 crtl->stack_realign_needed
4502 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4503 crtl->stack_realign_tried = crtl->stack_realign_needed;
4505 crtl->stack_realign_processed = true;
4507 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4508 alignment. */
4509 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4510 drap_rtx = targetm.calls.get_drap_rtx ();
4512 /* stack_realign_drap and drap_rtx must match. */
4513 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4515 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4516 if (NULL != drap_rtx)
4518 crtl->args.internal_arg_pointer = drap_rtx;
4520 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4521 needed. */
4522 fixup_tail_calls ();
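/* For illustration (target-specific behavior; x86 used as an assumed
   example): a function with a local such as
     int buf[8] __attribute__ ((aligned (32)));
   may need its stack realigned beyond INCOMING_STACK_BOUNDARY.  When
   the target decides incoming arguments must stay reachable through a
   separate register, get_drap_rtx returns a DRAP rtx and
   crtl->args.internal_arg_pointer is redirected to it above.  */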
4526 /* Translate the intermediate representation contained in the CFG
4527 from GIMPLE trees to RTL.
4529 We do conversion per basic block and preserve/update the tree CFG.
4530 This implies we have to do some magic as the CFG can simultaneously
4531 consist of basic blocks containing RTL and GIMPLE trees. This can
4532 confuse the CFG hooks, so be careful to not manipulate CFG during
4533 the expansion. */
4535 static unsigned int
4536 gimple_expand_cfg (void)
4538 basic_block bb, init_block;
4539 sbitmap blocks;
4540 edge_iterator ei;
4541 edge e;
4542 rtx var_seq, var_ret_seq;
4543 unsigned i;
4545 timevar_push (TV_OUT_OF_SSA);
4546 rewrite_out_of_ssa (&SA);
4547 timevar_pop (TV_OUT_OF_SSA);
4548 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
4550 /* Make sure all values used by the optimization passes have sane
4551 defaults. */
4552 reg_renumber = 0;
4554 /* Some backends want to know that we are expanding to RTL. */
4555 currently_expanding_to_rtl = 1;
4556 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4557 free_dominance_info (CDI_DOMINATORS);
4559 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4561 insn_locations_init ();
4562 if (!DECL_IS_BUILTIN (current_function_decl))
4564 /* Eventually, all FEs should explicitly set function_start_locus. */
4565 if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
4566 set_curr_insn_location
4567 (DECL_SOURCE_LOCATION (current_function_decl));
4568 else
4569 set_curr_insn_location (cfun->function_start_locus);
4571 else
4572 set_curr_insn_location (UNKNOWN_LOCATION);
4573 prologue_location = curr_insn_location ();
4575 #ifdef INSN_SCHEDULING
4576 init_sched_attrs ();
4577 #endif
4579 /* Make sure the first insn is a note even if we don't want linenums.
4580 This makes sure the first insn will never be deleted.
4581 Also, final expects a note to appear there. */
4582 emit_note (NOTE_INSN_DELETED);
4584 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4585 discover_nonconstant_array_refs ();
4587 targetm.expand_to_rtl_hook ();
4588 crtl->stack_alignment_needed = STACK_BOUNDARY;
4589 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4590 crtl->stack_alignment_estimated = 0;
4591 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4592 cfun->cfg->max_jumptable_ents = 0;
4594 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4595 of the function section at expansion time to predict the distance of calls. */
4596 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4598 /* Expand the variables recorded during gimple lowering. */
4599 timevar_push (TV_VAR_EXPAND);
4600 start_sequence ();
4602 var_ret_seq = expand_used_vars ();
4604 var_seq = get_insns ();
4605 end_sequence ();
4606 timevar_pop (TV_VAR_EXPAND);
4608 /* Honor stack protection warnings. */
4609 if (warn_stack_protect)
4611 if (cfun->calls_alloca)
4612 warning (OPT_Wstack_protector,
4613 "stack protector not protecting local variables: "
4614 "variable length buffer");
4615 if (has_short_buffer && !crtl->stack_protect_guard)
4616 warning (OPT_Wstack_protector,
4617 "stack protector not protecting function: "
4618 "all local arrays are less than %d bytes long",
4619 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4622 /* Set up the function's parameters and prepare for its return. */
4623 expand_function_start (current_function_decl);
4625 /* If we emitted any instructions for setting up the variables,
4626 emit them before the FUNCTION_START note. */
4627 if (var_seq)
4629 emit_insn_before (var_seq, parm_birth_insn);
4631 /* In expand_function_end we'll insert the alloca save/restore
4632 before parm_birth_insn. We've just inserted an alloca call.
4633 Adjust the pointer to match. */
4634 parm_birth_insn = var_seq;
4637 /* Now that we also have the parameter RTXs, copy them over to our
4638 partitions. */
4639 for (i = 0; i < SA.map->num_partitions; i++)
4641 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4643 if (TREE_CODE (var) != VAR_DECL
4644 && !SA.partition_to_pseudo[i])
4645 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4646 gcc_assert (SA.partition_to_pseudo[i]);
4648 /* If this decl was marked as living in multiple places, reset
4649 this now to NULL. */
4650 if (DECL_RTL_IF_SET (var) == pc_rtx)
4651 SET_DECL_RTL (var, NULL);
4653 /* Some RTL parts really want to look at DECL_RTL(x) when x
4654 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4655 SET_DECL_RTL here to make this available, but that would mean
4656 selecting one of the potentially many RTLs for one DECL. Instead
4657 of doing that we simply reset the MEM_EXPR of the RTL in question,
4658 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4659 if (!DECL_RTL_SET_P (var))
4661 if (MEM_P (SA.partition_to_pseudo[i]))
4662 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4666 /* If we have a class containing differently aligned pointers
4667 we need to merge those into the corresponding RTL pointer
4668 alignment. */
4669 for (i = 1; i < num_ssa_names; i++)
4671 tree name = ssa_name (i);
4672 int part;
4673 rtx r;
4675 if (!name
4676 /* We might have generated new SSA names in
4677 update_alias_info_with_stack_vars. They will have a NULL
4678 defining statement, and won't be part of the partitioning,
4679 so ignore those. */
4680 || !SSA_NAME_DEF_STMT (name))
4681 continue;
4682 part = var_to_partition (SA.map, name);
4683 if (part == NO_PARTITION)
4684 continue;
4686 /* Adjust all partition members to get the underlying decl of
4687 the representative which we might have created in expand_one_var. */
4688 if (SSA_NAME_VAR (name) == NULL_TREE)
4690 tree leader = partition_to_var (SA.map, part);
4691 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
4692 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
4694 if (!POINTER_TYPE_P (TREE_TYPE (name)))
4695 continue;
4697 r = SA.partition_to_pseudo[part];
4698 if (REG_P (r))
4699 mark_reg_pointer (r, get_pointer_alignment (name));
4702 /* If this function is `main', emit a call to `__main'
4703 to run global initializers, etc. */
4704 if (DECL_NAME (current_function_decl)
4705 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4706 && DECL_FILE_SCOPE_P (current_function_decl))
4707 expand_main_function ();
4709 /* Initialize the stack_protect_guard field. This must happen after the
4710 call to __main (if any) so that the external decl is initialized. */
4711 if (crtl->stack_protect_guard)
4712 stack_protect_prologue ();
4714 expand_phi_nodes (&SA);
4716 /* Register rtl specific functions for cfg. */
4717 rtl_register_cfg_hooks ();
4719 init_block = construct_init_block ();
4721 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4722 remaining edges later. */
4723 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4724 e->flags &= ~EDGE_EXECUTABLE;
4726 lab_rtx_for_bb = pointer_map_create ();
4727 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4728 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
4730 if (MAY_HAVE_DEBUG_INSNS)
4731 expand_debug_locations ();
4733 /* Free stuff we no longer need after GIMPLE optimizations. */
4734 free_dominance_info (CDI_DOMINATORS);
4735 free_dominance_info (CDI_POST_DOMINATORS);
4736 delete_tree_cfg_annotations ();
4738 timevar_push (TV_OUT_OF_SSA);
4739 finish_out_of_ssa (&SA);
4740 timevar_pop (TV_OUT_OF_SSA);
4742 timevar_push (TV_POST_EXPAND);
4743 /* We are no longer in SSA form. */
4744 cfun->gimple_df->in_ssa_p = false;
4745 if (current_loops)
4746 loops_state_clear (LOOP_CLOSED_SSA);
4748 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4749 conservatively to true until they are all profile aware. */
4750 pointer_map_destroy (lab_rtx_for_bb);
4751 free_histograms ();
4753 construct_exit_block ();
4754 insn_locations_finalize ();
4756 if (var_ret_seq)
4758 rtx after = return_label;
4759 rtx next = NEXT_INSN (after);
4760 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
4761 after = next;
4762 emit_insn_after (var_ret_seq, after);
4765 /* Zap the tree EH table. */
4766 set_eh_throw_stmt_table (cfun, NULL);
4768 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4769 split edges which edge insertions might do. */
4770 rebuild_jump_labels (get_insns ());
4772 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4774 edge e;
4775 edge_iterator ei;
4776 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4778 if (e->insns.r)
4780 rebuild_jump_labels_chain (e->insns.r);
4781 /* Avoid putting insns before parm_birth_insn. */
4782 if (e->src == ENTRY_BLOCK_PTR
4783 && single_succ_p (ENTRY_BLOCK_PTR)
4784 && parm_birth_insn)
4786 rtx insns = e->insns.r;
4787 e->insns.r = NULL_RTX;
4788 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4790 else
4791 commit_one_edge_insertion (e);
4793 else
4794 ei_next (&ei);
4798 /* We're done expanding trees to RTL. */
4799 currently_expanding_to_rtl = 0;
4801 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4803 edge e;
4804 edge_iterator ei;
4805 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4807 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4808 e->flags &= ~EDGE_EXECUTABLE;
4810 /* At the moment not all abnormal edges match the RTL
4811 representation. It is safe to remove them here as
4812 find_many_sub_basic_blocks will rediscover them.
4813 In the future we should get this fixed properly. */
4814 if ((e->flags & EDGE_ABNORMAL)
4815 && !(e->flags & EDGE_SIBCALL))
4816 remove_edge (e);
4817 else
4818 ei_next (&ei);
4822 blocks = sbitmap_alloc (last_basic_block);
4823 bitmap_ones (blocks);
4824 find_many_sub_basic_blocks (blocks);
4825 sbitmap_free (blocks);
4826 purge_all_dead_edges ();
4828 expand_stack_alignment ();
4830 /* Fix up REG_EQUIV notes in the prologue if there are tail calls in this
4831 function. */
4832 if (crtl->tail_call_emit)
4833 fixup_tail_calls ();
4835 /* After initial rtl generation, call back to finish generating
4836 exception support code. We need to do this before cleaning up
4837 the CFG as the code does not expect dead landing pads. */
4838 if (cfun->eh->region_tree != NULL)
4839 finish_eh_generation ();
4841 /* Remove unreachable blocks; otherwise we cannot compute dominators
4842 which are needed for loop state verification. As a side-effect
4843 this also compacts blocks.
4844 ??? We cannot remove trivially dead insns here as for example
4845 the DRAP reg on i?86 is not magically live at this point.
4846 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4847 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4849 #ifdef ENABLE_CHECKING
4850 verify_flow_info ();
4851 #endif
4853 /* Initialize pseudos allocated for hard registers. */
4854 emit_initial_value_sets ();
4856 /* And finally unshare all RTL. */
4857 unshare_all_rtl ();
4859 /* There's no need to defer outputting this function any more; we
4860 know we want to output it. */
4861 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4863 /* Now that we're done expanding trees to RTL, we shouldn't have any
4864 more CONCATs anywhere. */
4865 generating_concat_p = 0;
4867 if (dump_file)
4869 fprintf (dump_file,
4870 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4871 /* And the pass manager will dump RTL for us. */
4874 /* If we're emitting a nested function, make sure its parent gets
4875 emitted as well. Doing otherwise confuses debug info. */
4877 tree parent;
4878 for (parent = DECL_CONTEXT (current_function_decl);
4879 parent != NULL_TREE;
4880 parent = get_containing_scope (parent))
4881 if (TREE_CODE (parent) == FUNCTION_DECL)
4882 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4885 /* We are now committed to emitting code for this function. Do any
4886 preparation, such as emitting abstract debug info for the inline
4887 before it gets mangled by optimization. */
4888 if (cgraph_function_possibly_inlined_p (current_function_decl))
4889 (*debug_hooks->outlining_inline_function) (current_function_decl);
4891 TREE_ASM_WRITTEN (current_function_decl) = 1;
4893 /* After expanding, the return labels are no longer needed. */
4894 return_label = NULL;
4895 naked_return_label = NULL;
4897 /* After expanding, the tm_restart map is no longer needed. */
4898 if (cfun->gimple_df->tm_restart)
4900 htab_delete (cfun->gimple_df->tm_restart);
4901 cfun->gimple_df->tm_restart = NULL;
4904 /* Tag the blocks with a depth number so that change_scope can find
4905 the common parent easily. */
4906 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4907 default_rtl_profile ();
4909 timevar_pop (TV_POST_EXPAND);
4911 return 0;
4914 namespace {
4916 const pass_data pass_data_expand =
4918 RTL_PASS, /* type */
4919 "expand", /* name */
4920 OPTGROUP_NONE, /* optinfo_flags */
4921 false, /* has_gate */
4922 true, /* has_execute */
4923 TV_EXPAND, /* tv_id */
4924 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
4925 | PROP_gimple_lcx
4926 | PROP_gimple_lvec ), /* properties_required */
4927 PROP_rtl, /* properties_provided */
4928 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
4929 ( TODO_verify_ssa | TODO_verify_flow
4930 | TODO_verify_stmts ), /* todo_flags_start */
4931 0, /* todo_flags_finish */
4934 class pass_expand : public rtl_opt_pass
4936 public:
4937 pass_expand(gcc::context *ctxt)
4938 : rtl_opt_pass(pass_data_expand, ctxt)
4941 /* opt_pass methods: */
4942 unsigned int execute () { return gimple_expand_cfg (); }
4944 }; // class pass_expand
4946 } // anon namespace
4948 rtl_opt_pass *
4949 make_pass_expand (gcc::context *ctxt)
4951 return new pass_expand (ctxt);