1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "function.h"
29 #include "expr.h"
30 #include "langhooks.h"
31 #include "tree-ssa.h"
32 #include "tree-pass.h"
33 #include "except.h"
34 #include "flags.h"
35 #include "diagnostic.h"
36 #include "gimple-pretty-print.h"
37 #include "toplev.h"
38 #include "debug.h"
39 #include "params.h"
40 #include "tree-inline.h"
41 #include "value-prof.h"
42 #include "target.h"
43 #include "tree-outof-ssa.h"
44 #include "bitmap.h"
45 #include "sbitmap.h"
46 #include "cfgloop.h"
47 #include "regs.h" /* For reg_renumber. */
48 #include "insn-attr.h" /* For INSN_SCHEDULING. */
49 #include "asan.h"
51 /* This variable holds information helping the rewriting of SSA trees
52 into RTL. */
53 struct ssaexpand SA;
55 /* This variable holds the currently expanded gimple statement for purposes
56 of communicating the profile info to the builtin expanders. */
57 gimple currently_expanding_gimple_stmt;
59 static rtx expand_debug_expr (tree);
61 /* Return an expression tree corresponding to the RHS of GIMPLE
62 statement STMT. */
64 tree
65 gimple_assign_rhs_to_tree (gimple stmt)
67 tree t;
68 enum gimple_rhs_class grhs_class;
70 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
72 if (grhs_class == GIMPLE_TERNARY_RHS)
73 t = build3 (gimple_assign_rhs_code (stmt),
74 TREE_TYPE (gimple_assign_lhs (stmt)),
75 gimple_assign_rhs1 (stmt),
76 gimple_assign_rhs2 (stmt),
77 gimple_assign_rhs3 (stmt));
78 else if (grhs_class == GIMPLE_BINARY_RHS)
79 t = build2 (gimple_assign_rhs_code (stmt),
80 TREE_TYPE (gimple_assign_lhs (stmt)),
81 gimple_assign_rhs1 (stmt),
82 gimple_assign_rhs2 (stmt));
83 else if (grhs_class == GIMPLE_UNARY_RHS)
84 t = build1 (gimple_assign_rhs_code (stmt),
85 TREE_TYPE (gimple_assign_lhs (stmt)),
86 gimple_assign_rhs1 (stmt));
87 else if (grhs_class == GIMPLE_SINGLE_RHS)
89 t = gimple_assign_rhs1 (stmt);
90 /* Avoid modifying this tree in place below. */
91 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
92 && gimple_location (stmt) != EXPR_LOCATION (t))
93 || (gimple_block (stmt)
94 && currently_expanding_to_rtl
95 && EXPR_P (t)))
96 t = copy_node (t);
98 else
99 gcc_unreachable ();
101 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
102 SET_EXPR_LOCATION (t, gimple_location (stmt));
104 return t;
108 #ifndef STACK_ALIGNMENT_NEEDED
109 #define STACK_ALIGNMENT_NEEDED 1
110 #endif
112 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
114 /* Associate declaration T with storage space X. If T is not an
115 SSA name, this is exactly SET_DECL_RTL; otherwise make the
116 partition of T associated with X. */
117 static inline void
118 set_rtl (tree t, rtx x)
120 if (TREE_CODE (t) == SSA_NAME)
122 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
123 if (x && !MEM_P (x))
124 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
125 /* For the benefit of debug information at -O0 (where vartracking
126 doesn't run) record the place also in the base DECL if it's
127 a normal variable (not a parameter). */
128 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
130 tree var = SSA_NAME_VAR (t);
131 /* If we don't yet have something recorded, just record it now. */
132 if (!DECL_RTL_SET_P (var))
133 SET_DECL_RTL (var, x);
134 /* If we have it set already to "multiple places" don't
135 change this. */
136 else if (DECL_RTL (var) == pc_rtx)
138 /* If we have something recorded and it's not the same place
139 as we want to record now, we have multiple partitions for the
140 same base variable, with different places. We can't just
141 randomly choose one, hence we have to say that we don't know.
142 This only happens with optimization, and there var-tracking
143 will figure out the right thing. */
144 else if (DECL_RTL (var) != x)
145 SET_DECL_RTL (var, pc_rtx);
148 else
149 SET_DECL_RTL (t, x);
152 /* This structure holds data relevant to one variable that will be
153 placed in a stack slot. */
154 struct stack_var
156 /* The variable. */
157 tree decl;
159 /* Initially, the size of the variable. Later, the size of the partition,
160 if this variable becomes its partition's representative. */
161 HOST_WIDE_INT size;
163 /* The *byte* alignment required for this variable. Or, as with the
164 size, the alignment for this partition. */
165 unsigned int alignb;
167 /* The partition representative. */
168 size_t representative;
170 /* The next stack variable in the partition, or EOC. */
171 size_t next;
173 /* The numbers of conflicting stack variables. */
174 bitmap conflicts;
177 #define EOC ((size_t)-1)
179 /* We have an array of such objects while deciding allocation. */
180 static struct stack_var *stack_vars;
181 static size_t stack_vars_alloc;
182 static size_t stack_vars_num;
183 static struct pointer_map_t *decl_to_stack_part;
185 /* Conflict bitmaps go on this obstack. This allows us to destroy
186 all of them in one big sweep. */
187 static bitmap_obstack stack_var_bitmap_obstack;
189 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
190 is non-decreasing. */
191 static size_t *stack_vars_sorted;
193 /* The phase of the stack frame. This is the known misalignment of
194 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
195 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
196 static int frame_phase;
198 /* Used during expand_used_vars to remember if we saw any decls for
199 which we'd like to enable stack smashing protection. */
200 static bool has_protected_decls;
202 /* Used during expand_used_vars. Remember if we saw a character buffer
203 smaller than our cutoff threshold. Used for -Wstack-protector. */
204 static bool has_short_buffer;
206 /* Compute the byte alignment to use for DECL. Ignore alignment
207 we can't satisfy given the expected alignment of the stack boundary. */
209 static unsigned int
210 align_local_variable (tree decl)
212 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
213 DECL_ALIGN (decl) = align;
214 return align / BITS_PER_UNIT;
217 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
218 Return the frame offset. */
220 static HOST_WIDE_INT
221 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
223 HOST_WIDE_INT offset, new_frame_offset;
225 new_frame_offset = frame_offset;
226 if (FRAME_GROWS_DOWNWARD)
228 new_frame_offset -= size + frame_phase;
229 new_frame_offset &= -align;
230 new_frame_offset += frame_phase;
231 offset = new_frame_offset;
233 else
235 new_frame_offset -= frame_phase;
236 new_frame_offset += align - 1;
237 new_frame_offset &= -align;
238 new_frame_offset += frame_phase;
239 offset = new_frame_offset;
240 new_frame_offset += size;
242 frame_offset = new_frame_offset;
244 if (frame_offset_overflow (frame_offset, cfun->decl))
245 frame_offset = offset = 0;
247 return offset;
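/* Illustrative aside, not part of GCC: a minimal standalone sketch of the
   rounding arithmetic used above, assuming a power-of-two ALIGN and a frame
   phase of zero.  Rounding towards more negative offsets serves a
   downward-growing frame; rounding up serves an upward-growing one.  The
   values in main are invented for illustration only.  */
#if 0
#include <stdio.h>

static long
round_down (long x, long align)
{
  return x & -align;                    /* clear the low bits */
}

static long
round_up (long x, long align)
{
  return (x + align - 1) & -align;
}

int
main (void)
{
  /* Allocating 12 bytes at 8-byte alignment in a downward-growing frame
     currently at offset -20 yields the new offset -32.  */
  printf ("%ld\n", round_down (-20 - 12, 8));   /* prints -32 */
  /* The same request in an upward-growing frame at offset 20 places the
     object at offset 24.  */
  printf ("%ld\n", round_up (20, 8));           /* prints 24 */
  return 0;
}
#endif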
250 /* Accumulate DECL into STACK_VARS. */
252 static void
253 add_stack_var (tree decl)
255 struct stack_var *v;
257 if (stack_vars_num >= stack_vars_alloc)
259 if (stack_vars_alloc)
260 stack_vars_alloc = stack_vars_alloc * 3 / 2;
261 else
262 stack_vars_alloc = 32;
263 stack_vars
264 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
266 if (!decl_to_stack_part)
267 decl_to_stack_part = pointer_map_create ();
269 v = &stack_vars[stack_vars_num];
270 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
272 v->decl = decl;
273 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
274 /* Ensure that all variables have size, so that &a != &b for any two
275 variables that are simultaneously live. */
276 if (v->size == 0)
277 v->size = 1;
278 v->alignb = align_local_variable (SSAVAR (decl));
279 /* An alignment of zero can mightily confuse us later. */
280 gcc_assert (v->alignb != 0);
282 /* All variables are initially in their own partition. */
283 v->representative = stack_vars_num;
284 v->next = EOC;
286 /* All variables initially conflict with no other. */
287 v->conflicts = NULL;
289 /* Ensure that this decl doesn't get put onto the list twice. */
290 set_rtl (decl, pc_rtx);
292 stack_vars_num++;
295 /* Make the decls associated with LUIDs X and Y conflict. */
297 static void
298 add_stack_var_conflict (size_t x, size_t y)
300 struct stack_var *a = &stack_vars[x];
301 struct stack_var *b = &stack_vars[y];
302 if (!a->conflicts)
303 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
304 if (!b->conflicts)
305 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
306 bitmap_set_bit (a->conflicts, y);
307 bitmap_set_bit (b->conflicts, x);
310 /* Check whether the decls associated with LUIDs X and Y conflict. */
312 static bool
313 stack_var_conflict_p (size_t x, size_t y)
315 struct stack_var *a = &stack_vars[x];
316 struct stack_var *b = &stack_vars[y];
317 if (x == y)
318 return false;
319 /* Partitions containing an SSA name result from gimple registers
320 with things like unsupported modes. They are top-level and
321 hence conflict with everything else. */
322 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
323 return true;
325 if (!a->conflicts || !b->conflicts)
326 return false;
327 return bitmap_bit_p (a->conflicts, y);
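/* Illustrative aside, not part of GCC: a minimal sketch of recording
   symmetric conflicts in per-object bit sets, in the spirit of
   add_stack_var_conflict and stack_var_conflict_p above.  Plain unsigned
   masks stand in for GCC's bitmap type; the objects are invented.  */
#if 0
#include <stdio.h>

#define NVARS 3
static unsigned conflicts[NVARS];       /* bit j set => conflicts with j */

static void
add_conflict (int x, int y)
{
  conflicts[x] |= 1u << y;              /* record both directions */
  conflicts[y] |= 1u << x;
}

static int
conflict_p (int x, int y)
{
  return x != y && ((conflicts[x] >> y) & 1);
}

int
main (void)
{
  add_conflict (0, 2);
  printf ("%d %d %d\n", conflict_p (0, 2), conflict_p (2, 0),
          conflict_p (0, 1));           /* prints 1 1 0 */
  return 0;
}
#endif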
330 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
331 enter its partition number into bitmap DATA. */
333 static bool
334 visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
336 bitmap active = (bitmap)data;
337 op = get_base_address (op);
338 if (op
339 && DECL_P (op)
340 && DECL_RTL_IF_SET (op) == pc_rtx)
342 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
343 if (v)
344 bitmap_set_bit (active, *v);
346 return false;
349 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
350 record conflicts between it and all currently active other partitions
351 from bitmap DATA. */
353 static bool
354 visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
356 bitmap active = (bitmap)data;
357 op = get_base_address (op);
358 if (op
359 && DECL_P (op)
360 && DECL_RTL_IF_SET (op) == pc_rtx)
362 size_t *v =
363 (size_t *) pointer_map_contains (decl_to_stack_part, op);
364 if (v && bitmap_set_bit (active, *v))
366 size_t num = *v;
367 bitmap_iterator bi;
368 unsigned i;
369 gcc_assert (num < stack_vars_num);
370 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
371 add_stack_var_conflict (num, i);
374 return false;
377 /* Helper routine for add_scope_conflicts, calculating the active partitions
378 at the end of BB, leaving the result in WORK. We're called to generate
379 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
380 liveness. */
382 static void
383 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
385 edge e;
386 edge_iterator ei;
387 gimple_stmt_iterator gsi;
388 bool (*visit)(gimple, tree, void *);
390 bitmap_clear (work);
391 FOR_EACH_EDGE (e, ei, bb->preds)
392 bitmap_ior_into (work, (bitmap)e->src->aux);
394 visit = visit_op;
396 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
398 gimple stmt = gsi_stmt (gsi);
399 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
401 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
403 gimple stmt = gsi_stmt (gsi);
405 if (gimple_clobber_p (stmt))
407 tree lhs = gimple_assign_lhs (stmt);
408 size_t *v;
409 /* Nested function lowering might introduce LHSs
410 that are COMPONENT_REFs. */
411 if (TREE_CODE (lhs) != VAR_DECL)
412 continue;
413 if (DECL_RTL_IF_SET (lhs) == pc_rtx
414 && (v = (size_t *)
415 pointer_map_contains (decl_to_stack_part, lhs)))
416 bitmap_clear_bit (work, *v);
418 else if (!is_gimple_debug (stmt))
420 if (for_conflict
421 && visit == visit_op)
423 /* If this is the first real instruction in this BB we need
424 to add conflicts for everything live at this point now.
425 Unlike classical liveness for named objects we can't
426 rely on seeing a def/use of the names we're interested in.
427 There might merely be indirect loads/stores. We'd not add any
428 conflicts for such partitions. */
429 bitmap_iterator bi;
430 unsigned i;
431 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
433 struct stack_var *a = &stack_vars[i];
434 if (!a->conflicts)
435 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
436 bitmap_ior_into (a->conflicts, work);
438 visit = visit_conflict;
440 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
445 /* Generate stack partition conflicts between all partitions that are
446 simultaneously live. */
448 static void
449 add_scope_conflicts (void)
451 basic_block bb;
452 bool changed;
453 bitmap work = BITMAP_ALLOC (NULL);
454 int *rpo;
455 int n_bbs;
457 /* We approximate the live range of a stack variable by taking the first
458 mention of its name as starting point(s), and by the end-of-scope
459 death clobber added by gimplify as ending point(s) of the range.
460 This overapproximates in the case where we, for instance, moved an address-taken
461 operation upward without also moving a dereference of it upward.
462 But it's conservatively correct, as a variable can never hold values
463 before its name is mentioned at least once.
465 We then do a mostly classical bitmap liveness algorithm. */
467 FOR_ALL_BB (bb)
468 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
470 rpo = XNEWVEC (int, last_basic_block);
471 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
473 changed = true;
474 while (changed)
476 int i;
477 changed = false;
478 for (i = 0; i < n_bbs; i++)
480 bitmap active;
481 bb = BASIC_BLOCK (rpo[i]);
482 active = (bitmap)bb->aux;
483 add_scope_conflicts_1 (bb, work, false);
484 if (bitmap_ior_into (active, work))
485 changed = true;
489 FOR_EACH_BB (bb)
490 add_scope_conflicts_1 (bb, work, true);
492 free (rpo);
493 BITMAP_FREE (work);
494 FOR_ALL_BB (bb)
495 BITMAP_FREE (bb->aux);
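/* Illustrative aside, not part of GCC: a minimal sketch of the
   iterate-until-stable bitmap dataflow used above, on an invented
   three-block CFG (0 -> 1 -> 2 with a back edge 2 -> 1).  Each block's
   "gen" mask stands in for the variables first mentioned there, and the
   propagated mask for the set live out of the block.  */
#if 0
#include <stdio.h>

#define NBLOCKS 3

int
main (void)
{
  unsigned gen[NBLOCKS] = { 0x1, 0x2, 0x4 };  /* per-block new mentions */
  unsigned out[NBLOCKS] = { 0, 0, 0 };        /* live-out approximation */
  int changed = 1;

  while (changed)
    {
      int b;
      changed = 0;
      for (b = 0; b < NBLOCKS; b++)
        {
          unsigned in = 0, new_out;
          if (b == 1)
            in = out[0] | out[2];       /* predecessors of 1: 0 and 2 */
          else if (b == 2)
            in = out[1];                /* predecessor of 2: 1 */
          new_out = in | gen[b];
          if (new_out != out[b])
            {
              out[b] = new_out;
              changed = 1;
            }
        }
    }
  printf ("%x %x %x\n", out[0], out[1], out[2]);  /* prints 1 7 7 */
  return 0;
}
#endif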
498 /* A subroutine of partition_stack_vars. A comparison function for qsort,
499 sorting an array of indices by the properties of the object. */
501 static int
502 stack_var_cmp (const void *a, const void *b)
504 size_t ia = *(const size_t *)a;
505 size_t ib = *(const size_t *)b;
506 unsigned int aligna = stack_vars[ia].alignb;
507 unsigned int alignb = stack_vars[ib].alignb;
508 HOST_WIDE_INT sizea = stack_vars[ia].size;
509 HOST_WIDE_INT sizeb = stack_vars[ib].size;
510 tree decla = stack_vars[ia].decl;
511 tree declb = stack_vars[ib].decl;
512 bool largea, largeb;
513 unsigned int uida, uidb;
515 /* Primary compare on "large" alignment. Large comes first. */
516 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
517 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
518 if (largea != largeb)
519 return (int)largeb - (int)largea;
521 /* Secondary compare on size, decreasing */
522 if (sizea > sizeb)
523 return -1;
524 if (sizea < sizeb)
525 return 1;
527 /* Tertiary compare on true alignment, decreasing. */
528 if (aligna < alignb)
529 return -1;
530 if (aligna > alignb)
531 return 1;
533 /* Final compare on ID for sort stability, increasing.
534 Two SSA names are compared by their version, SSA names come before
535 non-SSA names, and two normal decls are compared by their DECL_UID. */
536 if (TREE_CODE (decla) == SSA_NAME)
538 if (TREE_CODE (declb) == SSA_NAME)
539 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
540 else
541 return -1;
543 else if (TREE_CODE (declb) == SSA_NAME)
544 return 1;
545 else
546 uida = DECL_UID (decla), uidb = DECL_UID (declb);
547 if (uida < uidb)
548 return 1;
549 if (uida > uidb)
550 return -1;
551 return 0;
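/* Illustrative aside, not part of GCC: a minimal sketch of sorting an
   index array with qsort and a comparator like the one above, keeping
   only the decreasing-size key and an index tie-break for stability; the
   alignment and SSA-name keys are omitted.  The sizes are invented.  */
#if 0
#include <stdio.h>
#include <stdlib.h>

static const long sizes[] = { 8, 32, 8, 16 };

static int
cmp_idx (const void *pa, const void *pb)
{
  size_t ia = *(const size_t *) pa;
  size_t ib = *(const size_t *) pb;
  if (sizes[ia] != sizes[ib])
    return sizes[ia] > sizes[ib] ? -1 : 1;      /* larger sizes first */
  return ia < ib ? -1 : ia > ib;                /* stable tie-break */
}

int
main (void)
{
  size_t order[4] = { 0, 1, 2, 3 };
  size_t i;
  qsort (order, 4, sizeof (size_t), cmp_idx);
  for (i = 0; i < 4; i++)
    printf ("%u ", (unsigned) order[i]);        /* prints 1 3 0 2 */
  printf ("\n");
  return 0;
}
#endif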
555 /* If the points-to solution *PT points to variables that are in a partition
556 together with other variables, add all partition members to the pointed-to
557 variables bitmap. */
559 static void
560 add_partitioned_vars_to_ptset (struct pt_solution *pt,
561 struct pointer_map_t *decls_to_partitions,
562 struct pointer_set_t *visited, bitmap temp)
564 bitmap_iterator bi;
565 unsigned i;
566 bitmap *part;
568 if (pt->anything
569 || pt->vars == NULL
570 /* The pointed-to vars bitmap is shared, it is enough to
571 visit it once. */
572 || pointer_set_insert (visited, pt->vars))
573 return;
575 bitmap_clear (temp);
577 /* By using a temporary bitmap to store all members of the partitions
578 we have to add, we make sure to visit each of the partitions only
579 once. */
580 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
581 if ((!temp
582 || !bitmap_bit_p (temp, i))
583 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
584 (void *)(size_t) i)))
585 bitmap_ior_into (temp, *part);
586 if (!bitmap_empty_p (temp))
587 bitmap_ior_into (pt->vars, temp);
590 /* Update points-to sets based on partition info, so we can use them on RTL.
591 The bitmaps representing stack partitions will be saved until expand,
592 where partitioned decls used as bases in memory expressions will be
593 rewritten. */
595 static void
596 update_alias_info_with_stack_vars (void)
598 struct pointer_map_t *decls_to_partitions = NULL;
599 size_t i, j;
600 tree var = NULL_TREE;
602 for (i = 0; i < stack_vars_num; i++)
604 bitmap part = NULL;
605 tree name;
606 struct ptr_info_def *pi;
608 /* Not interested in partitions with a single variable. */
609 if (stack_vars[i].representative != i
610 || stack_vars[i].next == EOC)
611 continue;
613 if (!decls_to_partitions)
615 decls_to_partitions = pointer_map_create ();
616 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
619 /* Create an SSA_NAME that points to the partition for use
620 as base during alias-oracle queries on RTL for bases that
621 have been partitioned. */
622 if (var == NULL_TREE)
623 var = create_tmp_var (ptr_type_node, NULL);
624 name = make_ssa_name (var, NULL);
626 /* Create bitmaps representing partitions. They will be used for
627 points-to sets later, so use GGC alloc. */
628 part = BITMAP_GGC_ALLOC ();
629 for (j = i; j != EOC; j = stack_vars[j].next)
631 tree decl = stack_vars[j].decl;
632 unsigned int uid = DECL_PT_UID (decl);
633 bitmap_set_bit (part, uid);
634 *((bitmap *) pointer_map_insert (decls_to_partitions,
635 (void *)(size_t) uid)) = part;
636 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
637 decl)) = name;
638 if (TREE_ADDRESSABLE (decl))
639 TREE_ADDRESSABLE (name) = 1;
642 /* Make the SSA name point to all partition members. */
643 pi = get_ptr_info (name);
644 pt_solution_set (&pi->pt, part, false);
647 /* Make all points-to sets that contain one member of a partition
648 contain all members of the partition. */
649 if (decls_to_partitions)
651 unsigned i;
652 struct pointer_set_t *visited = pointer_set_create ();
653 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
655 for (i = 1; i < num_ssa_names; i++)
657 tree name = ssa_name (i);
658 struct ptr_info_def *pi;
660 if (name
661 && POINTER_TYPE_P (TREE_TYPE (name))
662 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
663 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
664 visited, temp);
667 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
668 decls_to_partitions, visited, temp);
670 pointer_set_destroy (visited);
671 pointer_map_destroy (decls_to_partitions);
672 BITMAP_FREE (temp);
676 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
677 partitioning algorithm. Partitions A and B are known to be non-conflicting.
678 Merge them into a single partition A. */
680 static void
681 union_stack_vars (size_t a, size_t b)
683 struct stack_var *vb = &stack_vars[b];
684 bitmap_iterator bi;
685 unsigned u;
687 gcc_assert (stack_vars[b].next == EOC);
688 /* Add B to A's partition. */
689 stack_vars[b].next = stack_vars[a].next;
690 stack_vars[b].representative = a;
691 stack_vars[a].next = b;
693 /* Update the required alignment of partition A to account for B. */
694 if (stack_vars[a].alignb < stack_vars[b].alignb)
695 stack_vars[a].alignb = stack_vars[b].alignb;
697 /* Update the interference graph and merge the conflicts. */
698 if (vb->conflicts)
700 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
701 add_stack_var_conflict (a, stack_vars[u].representative);
702 BITMAP_FREE (vb->conflicts);
706 /* A subroutine of expand_used_vars. Binpack the variables into
707 partitions constrained by the interference graph. The overall
708 algorithm used is as follows:
710 Sort the objects by size in descending order.
711 For each object A {
712 S = size(A)
713 O = 0
714 loop {
715 Look for the largest non-conflicting object B with size <= S.
716 UNION (A, B)
721 static void
722 partition_stack_vars (void)
724 size_t si, sj, n = stack_vars_num;
726 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
727 for (si = 0; si < n; ++si)
728 stack_vars_sorted[si] = si;
730 if (n == 1)
731 return;
733 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
735 for (si = 0; si < n; ++si)
737 size_t i = stack_vars_sorted[si];
738 unsigned int ialign = stack_vars[i].alignb;
739 HOST_WIDE_INT isize = stack_vars[i].size;
741 /* Ignore objects that aren't partition representatives. If we
742 see a var that is not a partition representative, it must
743 have been merged earlier. */
744 if (stack_vars[i].representative != i)
745 continue;
747 for (sj = si + 1; sj < n; ++sj)
749 size_t j = stack_vars_sorted[sj];
750 unsigned int jalign = stack_vars[j].alignb;
751 HOST_WIDE_INT jsize = stack_vars[j].size;
753 /* Ignore objects that aren't partition representatives. */
754 if (stack_vars[j].representative != j)
755 continue;
757 /* Do not mix objects of "small" (supported) alignment
758 and "large" (unsupported) alignment. */
759 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
760 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
761 break;
763 /* For Address Sanitizer do not mix objects with different
764 sizes, as the shorter vars wouldn't be adequately protected.
765 Don't do that for "large" (unsupported) alignment objects,
766 those aren't protected anyway. */
767 if ((flag_sanitize & SANITIZE_ADDRESS) && isize != jsize
768 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
769 break;
771 /* Ignore conflicting objects. */
772 if (stack_var_conflict_p (i, j))
773 continue;
775 /* UNION the objects, placing J in I's partition. */
776 union_stack_vars (i, j);
780 update_alias_info_with_stack_vars ();
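/* Illustrative aside, not part of GCC: a minimal sketch of the greedy
   pairing above -- walk the objects in decreasing-size order and merge
   every later, non-conflicting object into the current representative.
   The alignment-class and address-sanitizer checks are omitted, a plain
   representative[] array replaces the representative/next chain, and the
   sizes and the single conflict are invented.  */
#if 0
#include <stdio.h>

#define N 4

int
main (void)
{
  /* Indices are assumed to be already sorted by decreasing size.  */
  int conflict[N][N] = { { 0 } };
  int repr[N] = { 0, 1, 2, 3 };
  int i, j;

  conflict[0][1] = conflict[1][0] = 1;  /* objects 0 and 1 overlap */

  for (i = 0; i < N; i++)
    {
      if (repr[i] != i)
        continue;                       /* already merged away */
      for (j = i + 1; j < N; j++)
        if (repr[j] == j && !conflict[i][j])
          repr[j] = i;                  /* j will share i's stack slot */
    }

  for (i = 0; i < N; i++)
    printf ("%d ", repr[i]);            /* prints 0 1 0 0 */
  printf ("\n");
  return 0;
}
#endif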
783 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
785 static void
786 dump_stack_var_partition (void)
788 size_t si, i, j, n = stack_vars_num;
790 for (si = 0; si < n; ++si)
792 i = stack_vars_sorted[si];
794 /* Skip variables that aren't partition representatives, for now. */
795 if (stack_vars[i].representative != i)
796 continue;
798 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
799 " align %u\n", (unsigned long) i, stack_vars[i].size,
800 stack_vars[i].alignb);
802 for (j = i; j != EOC; j = stack_vars[j].next)
804 fputc ('\t', dump_file);
805 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
807 fputc ('\n', dump_file);
811 /* Assign rtl to DECL at BASE + OFFSET. */
813 static void
814 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
815 HOST_WIDE_INT offset)
817 unsigned align;
818 rtx x;
820 /* If this fails, we've overflowed the stack frame. Error nicely? */
821 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
823 x = plus_constant (Pmode, base, offset);
824 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
826 if (TREE_CODE (decl) != SSA_NAME)
828 /* Set the alignment we actually gave this decl if it isn't an SSA name.
829 If it is, we generate stack slots only accidentally, so the alignment
830 isn't as important; we'll simply use the alignment that is already set. */
831 if (base == virtual_stack_vars_rtx)
832 offset -= frame_phase;
833 align = offset & -offset;
834 align *= BITS_PER_UNIT;
835 if (align == 0 || align > base_align)
836 align = base_align;
838 /* One would think that we could assert that we're not decreasing
839 alignment here, but (at least) the i386 port does exactly this
840 via the MINIMUM_ALIGNMENT hook. */
842 DECL_ALIGN (decl) = align;
843 DECL_USER_ALIGN (decl) = 0;
846 set_mem_attributes (x, SSAVAR (decl), true);
847 set_rtl (decl, x);
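/* Illustrative aside, not part of GCC: "offset & -offset" isolates the
   lowest set bit of OFFSET, i.e. the largest power of two dividing it,
   which is the byte alignment that can be guaranteed at BASE + OFFSET
   when BASE itself is at least that aligned.  The sample offsets are
   invented.  */
#if 0
#include <stdio.h>

int
main (void)
{
  long offsets[] = { 48, 20, -24 };
  int i;
  for (i = 0; i < 3; i++)
    {
      long off = offsets[i];
      printf ("%ld -> %ld\n", off, off & -off);   /* 16, 4 and 8 */
    }
  return 0;
}
#endif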
850 struct stack_vars_data
852 /* Vector of offset pairs, always the end of some padding followed
853 by the start of the padding that needs Address Sanitizer protection.
854 The vector is reversed: pairs with the highest offsets come first. */
855 vec<HOST_WIDE_INT> asan_vec;
857 /* Vector of partition representative decls in between the paddings. */
858 vec<tree> asan_decl_vec;
861 /* A subroutine of expand_used_vars. Give each partition representative
862 a unique location within the stack frame. Update each partition member
863 with that location. */
865 static void
866 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
868 size_t si, i, j, n = stack_vars_num;
869 HOST_WIDE_INT large_size = 0, large_alloc = 0;
870 rtx large_base = NULL;
871 unsigned large_align = 0;
872 tree decl;
874 /* Determine if there are any variables requiring "large" alignment.
875 Since these are dynamically allocated, we only process these if
876 no predicate is involved. */
877 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
878 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
880 /* Find the total size of these variables. */
881 for (si = 0; si < n; ++si)
883 unsigned alignb;
885 i = stack_vars_sorted[si];
886 alignb = stack_vars[i].alignb;
888 /* Stop when we get to the first decl with "small" alignment. */
889 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
890 break;
892 /* Skip variables that aren't partition representatives. */
893 if (stack_vars[i].representative != i)
894 continue;
896 /* Skip variables that have already had rtl assigned. See also
897 add_stack_var where we perpetrate this pc_rtx hack. */
898 decl = stack_vars[i].decl;
899 if ((TREE_CODE (decl) == SSA_NAME
900 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
901 : DECL_RTL (decl)) != pc_rtx)
902 continue;
904 large_size += alignb - 1;
905 large_size &= -(HOST_WIDE_INT)alignb;
906 large_size += stack_vars[i].size;
909 /* If there were any, allocate space. */
910 if (large_size > 0)
911 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
912 large_align, true);
915 for (si = 0; si < n; ++si)
917 rtx base;
918 unsigned base_align, alignb;
919 HOST_WIDE_INT offset;
921 i = stack_vars_sorted[si];
923 /* Skip variables that aren't partition representatives, for now. */
924 if (stack_vars[i].representative != i)
925 continue;
927 /* Skip variables that have already had rtl assigned. See also
928 add_stack_var where we perpetrate this pc_rtx hack. */
929 decl = stack_vars[i].decl;
930 if ((TREE_CODE (decl) == SSA_NAME
931 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
932 : DECL_RTL (decl)) != pc_rtx)
933 continue;
935 /* Check the predicate to see whether this variable should be
936 allocated in this pass. */
937 if (pred && !pred (i))
938 continue;
940 alignb = stack_vars[i].alignb;
941 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
943 if ((flag_sanitize & SANITIZE_ADDRESS) && pred)
945 HOST_WIDE_INT prev_offset = frame_offset;
946 tree repr_decl = NULL_TREE;
948 offset
949 = alloc_stack_frame_space (stack_vars[i].size
950 + ASAN_RED_ZONE_SIZE,
951 MAX (alignb, ASAN_RED_ZONE_SIZE));
952 data->asan_vec.safe_push (prev_offset);
953 data->asan_vec.safe_push (offset + stack_vars[i].size);
954 /* Find the best representative of the partition.
955 Prefer those with a DECL_NAME, and better still those
956 satisfying the asan_protect_stack_decl predicate. */
957 for (j = i; j != EOC; j = stack_vars[j].next)
958 if (asan_protect_stack_decl (stack_vars[j].decl)
959 && DECL_NAME (stack_vars[j].decl))
961 repr_decl = stack_vars[j].decl;
962 break;
964 else if (repr_decl == NULL_TREE
965 && DECL_P (stack_vars[j].decl)
966 && DECL_NAME (stack_vars[j].decl))
967 repr_decl = stack_vars[j].decl;
968 if (repr_decl == NULL_TREE)
969 repr_decl = stack_vars[i].decl;
970 data->asan_decl_vec.safe_push (repr_decl);
972 else
973 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
974 base = virtual_stack_vars_rtx;
975 base_align = crtl->max_used_stack_slot_alignment;
977 else
979 /* Large alignment is only processed in the last pass. */
980 if (pred)
981 continue;
982 gcc_assert (large_base != NULL);
984 large_alloc += alignb - 1;
985 large_alloc &= -(HOST_WIDE_INT)alignb;
986 offset = large_alloc;
987 large_alloc += stack_vars[i].size;
989 base = large_base;
990 base_align = large_align;
993 /* Create rtl for each variable based on their location within the
994 partition. */
995 for (j = i; j != EOC; j = stack_vars[j].next)
997 expand_one_stack_var_at (stack_vars[j].decl,
998 base, base_align,
999 offset);
1003 gcc_assert (large_alloc == large_size);
1006 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1007 static HOST_WIDE_INT
1008 account_stack_vars (void)
1010 size_t si, j, i, n = stack_vars_num;
1011 HOST_WIDE_INT size = 0;
1013 for (si = 0; si < n; ++si)
1015 i = stack_vars_sorted[si];
1017 /* Skip variables that aren't partition representatives, for now. */
1018 if (stack_vars[i].representative != i)
1019 continue;
1021 size += stack_vars[i].size;
1022 for (j = i; j != EOC; j = stack_vars[j].next)
1023 set_rtl (stack_vars[j].decl, NULL);
1025 return size;
1028 /* A subroutine of expand_one_var. Called to immediately assign rtl
1029 to a variable to be allocated in the stack frame. */
1031 static void
1032 expand_one_stack_var (tree var)
1034 HOST_WIDE_INT size, offset;
1035 unsigned byte_align;
1037 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
1038 byte_align = align_local_variable (SSAVAR (var));
1040 /* We handle highly aligned variables in expand_stack_vars. */
1041 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1043 offset = alloc_stack_frame_space (size, byte_align);
1045 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1046 crtl->max_used_stack_slot_alignment, offset);
1049 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1050 that will reside in a hard register. */
1052 static void
1053 expand_one_hard_reg_var (tree var)
1055 rest_of_decl_compilation (var, 0, 0);
1058 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1059 that will reside in a pseudo register. */
1061 static void
1062 expand_one_register_var (tree var)
1064 tree decl = SSAVAR (var);
1065 tree type = TREE_TYPE (decl);
1066 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1067 rtx x = gen_reg_rtx (reg_mode);
1069 set_rtl (var, x);
1071 /* Note if the object is a user variable. */
1072 if (!DECL_ARTIFICIAL (decl))
1073 mark_user_reg (x);
1075 if (POINTER_TYPE_P (type))
1076 mark_reg_pointer (x, get_pointer_alignment (var));
1079 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1080 has some associated error, e.g. its type is error-mark. We just need
1081 to pick something that won't crash the rest of the compiler. */
1083 static void
1084 expand_one_error_var (tree var)
1086 enum machine_mode mode = DECL_MODE (var);
1087 rtx x;
1089 if (mode == BLKmode)
1090 x = gen_rtx_MEM (BLKmode, const0_rtx);
1091 else if (mode == VOIDmode)
1092 x = const0_rtx;
1093 else
1094 x = gen_reg_rtx (mode);
1096 SET_DECL_RTL (var, x);
1099 /* A subroutine of expand_one_var. VAR is a variable that will be
1100 allocated to the local stack frame. Return true if we wish to
1101 add VAR to STACK_VARS so that it will be coalesced with other
1102 variables. Return false to allocate VAR immediately.
1104 This function is used to reduce the number of variables considered
1105 for coalescing, which reduces the size of the quadratic problem. */
1107 static bool
1108 defer_stack_allocation (tree var, bool toplevel)
1110 /* If stack protection is enabled, *all* stack variables must be deferred,
1111 so that we can re-order the strings to the top of the frame.
1112 Similarly for Address Sanitizer. */
1113 if (flag_stack_protect || (flag_sanitize & SANITIZE_ADDRESS))
1114 return true;
1116 /* We handle "large" alignment via dynamic allocation. We want to handle
1117 this extra complication in only one place, so defer them. */
1118 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1119 return true;
1121 /* Variables in the outermost scope automatically conflict with
1122 every other variable. The only reason to want to defer them
1123 at all is that, after sorting, we can more efficiently pack
1124 small variables in the stack frame. Continue to defer at -O2. */
1125 if (toplevel && optimize < 2)
1126 return false;
1128 /* Without optimization, *most* variables are allocated from the
1129 stack, which makes the quadratic problem large exactly when we
1130 want compilation to proceed as quickly as possible. On the
1131 other hand, we don't want the function's stack frame size to
1132 get completely out of hand. So we avoid adding scalars and
1133 "small" aggregates to the list at all. */
1134 if (optimize == 0
1135 && (tree_low_cst (DECL_SIZE_UNIT (var), 1)
1136 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)))
1137 return false;
1139 return true;
1142 /* A subroutine of expand_used_vars. Expand one variable according to
1143 its flavor. Variables to be placed on the stack are not actually
1144 expanded yet, merely recorded.
1145 When REALLY_EXPAND is false, only add stack values to be allocated.
1146 Return the stack usage this variable is supposed to take.
1149 static HOST_WIDE_INT
1150 expand_one_var (tree var, bool toplevel, bool really_expand)
1152 unsigned int align = BITS_PER_UNIT;
1153 tree origvar = var;
1155 var = SSAVAR (var);
1157 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1159 /* Because we don't know if VAR will be in a register or on the stack,
1160 we conservatively assume it will be on the stack even if VAR is
1161 eventually put into a register after the RA pass. For non-automatic
1162 variables, which won't be on the stack, we collect the alignment of the
1163 type and ignore any user-specified alignment. */
1164 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1165 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1166 TYPE_MODE (TREE_TYPE (var)),
1167 TYPE_ALIGN (TREE_TYPE (var)));
1168 else if (DECL_HAS_VALUE_EXPR_P (var)
1169 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1170 /* Don't consider debug-only variables with DECL_HAS_VALUE_EXPR_P set,
1171 or variables which were already assigned a stack slot by
1172 expand_one_stack_var_at - in the latter case DECL_ALIGN has already
1173 been adjusted according to the offset chosen for it. */
1174 align = crtl->stack_alignment_estimated;
1175 else
1176 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1178 /* If the variable alignment is very large we'll dynamically allocate
1179 it, which means that the in-frame portion is just a pointer. */
1180 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1181 align = POINTER_SIZE;
1184 if (SUPPORTS_STACK_ALIGNMENT
1185 && crtl->stack_alignment_estimated < align)
1187 /* stack_alignment_estimated shouldn't change after stack
1188 realign decision has been made. */
1189 gcc_assert (!crtl->stack_realign_processed);
1190 crtl->stack_alignment_estimated = align;
1193 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1194 So here we only make sure stack_alignment_needed >= align. */
1195 if (crtl->stack_alignment_needed < align)
1196 crtl->stack_alignment_needed = align;
1197 if (crtl->max_used_stack_slot_alignment < align)
1198 crtl->max_used_stack_slot_alignment = align;
1200 if (TREE_CODE (origvar) == SSA_NAME)
1202 gcc_assert (TREE_CODE (var) != VAR_DECL
1203 || (!DECL_EXTERNAL (var)
1204 && !DECL_HAS_VALUE_EXPR_P (var)
1205 && !TREE_STATIC (var)
1206 && TREE_TYPE (var) != error_mark_node
1207 && !DECL_HARD_REGISTER (var)
1208 && really_expand));
1210 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1212 else if (DECL_EXTERNAL (var))
1214 else if (DECL_HAS_VALUE_EXPR_P (var))
1216 else if (TREE_STATIC (var))
1218 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1220 else if (TREE_TYPE (var) == error_mark_node)
1222 if (really_expand)
1223 expand_one_error_var (var);
1225 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1227 if (really_expand)
1228 expand_one_hard_reg_var (var);
1230 else if (use_register_for_decl (var))
1232 if (really_expand)
1233 expand_one_register_var (origvar);
1235 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1237 /* Reject variables which cover more than half of the address-space. */
1238 if (really_expand)
1240 error ("size of variable %q+D is too large", var);
1241 expand_one_error_var (var);
1244 else if (defer_stack_allocation (var, toplevel))
1245 add_stack_var (origvar);
1246 else
1248 if (really_expand)
1249 expand_one_stack_var (origvar);
1250 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1252 return 0;
1255 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1256 expanding variables. Those variables that can be put into registers
1257 are allocated pseudos; those that can't are put on the stack.
1259 TOPLEVEL is true if this is the outermost BLOCK. */
1261 static void
1262 expand_used_vars_for_block (tree block, bool toplevel)
1264 tree t;
1266 /* Expand all variables at this level. */
1267 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1268 if (TREE_USED (t)
1269 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1270 || !DECL_NONSHAREABLE (t)))
1271 expand_one_var (t, toplevel, true);
1273 /* Expand all variables at containing levels. */
1274 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1275 expand_used_vars_for_block (t, false);
1278 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1279 and clear TREE_USED on all local variables. */
1281 static void
1282 clear_tree_used (tree block)
1284 tree t;
1286 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1287 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1288 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1289 || !DECL_NONSHAREABLE (t))
1290 TREE_USED (t) = 0;
1292 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1293 clear_tree_used (t);
1296 enum {
1297 SPCT_FLAG_DEFAULT = 1,
1298 SPCT_FLAG_ALL = 2,
1299 SPCT_FLAG_STRONG = 3
1302 /* Examine TYPE and determine a bit mask of the following features. */
1304 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1305 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1306 #define SPCT_HAS_ARRAY 4
1307 #define SPCT_HAS_AGGREGATE 8
1309 static unsigned int
1310 stack_protect_classify_type (tree type)
1312 unsigned int ret = 0;
1313 tree t;
1315 switch (TREE_CODE (type))
1317 case ARRAY_TYPE:
1318 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1319 if (t == char_type_node
1320 || t == signed_char_type_node
1321 || t == unsigned_char_type_node)
1323 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1324 unsigned HOST_WIDE_INT len;
1326 if (!TYPE_SIZE_UNIT (type)
1327 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1328 len = max;
1329 else
1330 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1332 if (len < max)
1333 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1334 else
1335 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1337 else
1338 ret = SPCT_HAS_ARRAY;
1339 break;
1341 case UNION_TYPE:
1342 case QUAL_UNION_TYPE:
1343 case RECORD_TYPE:
1344 ret = SPCT_HAS_AGGREGATE;
1345 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1346 if (TREE_CODE (t) == FIELD_DECL)
1347 ret |= stack_protect_classify_type (TREE_TYPE (t));
1348 break;
1350 default:
1351 break;
1354 return ret;
1357 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1358 part of the local stack frame. Remember if we ever return nonzero for
1359 any variable in this function. The return value is the phase number in
1360 which the variable should be allocated. */
1362 static int
1363 stack_protect_decl_phase (tree decl)
1365 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1366 int ret = 0;
1368 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1369 has_short_buffer = true;
1371 if (flag_stack_protect == SPCT_FLAG_ALL
1372 || flag_stack_protect == SPCT_FLAG_STRONG)
1374 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1375 && !(bits & SPCT_HAS_AGGREGATE))
1376 ret = 1;
1377 else if (bits & SPCT_HAS_ARRAY)
1378 ret = 2;
1380 else
1381 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1383 if (ret)
1384 has_protected_decls = true;
1386 return ret;
1389 /* Two helper routines that check for phase 1 and phase 2. These are used
1390 as callbacks for expand_stack_vars. */
1392 static bool
1393 stack_protect_decl_phase_1 (size_t i)
1395 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1398 static bool
1399 stack_protect_decl_phase_2 (size_t i)
1401 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1404 /* A helper function that checks for the asan phase (with stack protector
1405 it is phase 3). This is used as a callback for expand_stack_vars.
1406 Returns true if any of the vars in the partition need to be protected. */
1408 static bool
1409 asan_decl_phase_3 (size_t i)
1411 while (i != EOC)
1413 if (asan_protect_stack_decl (stack_vars[i].decl))
1414 return true;
1415 i = stack_vars[i].next;
1417 return false;
1420 /* Ensure that variables in different stack protection phases conflict
1421 so that they are not merged and share the same stack slot. */
1423 static void
1424 add_stack_protection_conflicts (void)
1426 size_t i, j, n = stack_vars_num;
1427 unsigned char *phase;
1429 phase = XNEWVEC (unsigned char, n);
1430 for (i = 0; i < n; ++i)
1431 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1433 for (i = 0; i < n; ++i)
1435 unsigned char ph_i = phase[i];
1436 for (j = i + 1; j < n; ++j)
1437 if (ph_i != phase[j])
1438 add_stack_var_conflict (i, j);
1441 XDELETEVEC (phase);
1444 /* Create a decl for the guard at the top of the stack frame. */
1446 static void
1447 create_stack_guard (void)
1449 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1450 VAR_DECL, NULL, ptr_type_node);
1451 TREE_THIS_VOLATILE (guard) = 1;
1452 TREE_USED (guard) = 1;
1453 expand_one_stack_var (guard);
1454 crtl->stack_protect_guard = guard;
1457 /* Prepare for expanding variables. */
1458 static void
1459 init_vars_expansion (void)
1461 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1462 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1464 /* A map from decl to stack partition. */
1465 decl_to_stack_part = pointer_map_create ();
1467 /* Initialize local stack smashing state. */
1468 has_protected_decls = false;
1469 has_short_buffer = false;
1472 /* Free up stack variable graph data. */
1473 static void
1474 fini_vars_expansion (void)
1476 bitmap_obstack_release (&stack_var_bitmap_obstack);
1477 if (stack_vars)
1478 XDELETEVEC (stack_vars);
1479 if (stack_vars_sorted)
1480 XDELETEVEC (stack_vars_sorted);
1481 stack_vars = NULL;
1482 stack_vars_sorted = NULL;
1483 stack_vars_alloc = stack_vars_num = 0;
1484 pointer_map_destroy (decl_to_stack_part);
1485 decl_to_stack_part = NULL;
1488 /* Make a fair guess for the size of the stack frame of the function
1489 in NODE. This doesn't have to be exact, the result is only used in
1490 the inline heuristics. So we don't want to run the full stack var
1491 packing algorithm (which is quadratic in the number of stack vars).
1492 Instead, we calculate the total size of all stack vars. This turns
1493 out to be a pretty fair estimate -- packing of stack vars doesn't
1494 happen very often. */
1496 HOST_WIDE_INT
1497 estimated_stack_frame_size (struct cgraph_node *node)
1499 HOST_WIDE_INT size = 0;
1500 size_t i;
1501 tree var;
1502 struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
1504 push_cfun (fn);
1506 init_vars_expansion ();
1508 FOR_EACH_LOCAL_DECL (fn, i, var)
1509 if (auto_var_in_fn_p (var, fn->decl))
1510 size += expand_one_var (var, true, false);
1512 if (stack_vars_num > 0)
1514 /* Fake sorting the stack vars for account_stack_vars (). */
1515 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1516 for (i = 0; i < stack_vars_num; ++i)
1517 stack_vars_sorted[i] = i;
1518 size += account_stack_vars ();
1521 fini_vars_expansion ();
1522 pop_cfun ();
1523 return size;
1526 /* Helper routine to check if a record or union contains an array field. */
1528 static int
1529 record_or_union_type_has_array_p (const_tree tree_type)
1531 tree fields = TYPE_FIELDS (tree_type);
1532 tree f;
1534 for (f = fields; f; f = DECL_CHAIN (f))
1535 if (TREE_CODE (f) == FIELD_DECL)
1537 tree field_type = TREE_TYPE (f);
1538 if (RECORD_OR_UNION_TYPE_P (field_type)
1539 && record_or_union_type_has_array_p (field_type))
1540 return 1;
1541 if (TREE_CODE (field_type) == ARRAY_TYPE)
1542 return 1;
1544 return 0;
1547 /* Expand all variables used in the function. */
1549 static rtx
1550 expand_used_vars (void)
1552 tree var, outer_block = DECL_INITIAL (current_function_decl);
1553 vec<tree> maybe_local_decls = vNULL;
1554 rtx var_end_seq = NULL_RTX;
1555 struct pointer_map_t *ssa_name_decls;
1556 unsigned i;
1557 unsigned len;
1558 bool gen_stack_protect_signal = false;
1560 /* Compute the phase of the stack frame for this function. */
1562 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1563 int off = STARTING_FRAME_OFFSET % align;
1564 frame_phase = off ? align - off : 0;
1567 /* Set TREE_USED on all variables in the local_decls. */
1568 FOR_EACH_LOCAL_DECL (cfun, i, var)
1569 TREE_USED (var) = 1;
1570 /* Clear TREE_USED on all variables associated with a block scope. */
1571 clear_tree_used (DECL_INITIAL (current_function_decl));
1573 init_vars_expansion ();
1575 ssa_name_decls = pointer_map_create ();
1576 for (i = 0; i < SA.map->num_partitions; i++)
1578 tree var = partition_to_var (SA.map, i);
1580 gcc_assert (!virtual_operand_p (var));
1582 /* Assign decls to each SSA name partition, share decls for partitions
1583 we could have coalesced (those with the same type). */
1584 if (SSA_NAME_VAR (var) == NULL_TREE)
1586 void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
1587 if (!*slot)
1588 *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
1589 replace_ssa_name_symbol (var, (tree) *slot);
1592 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1593 expand_one_var (var, true, true);
1594 else
1596 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1597 contain the default def (representing the parm or result itself)
1598 we don't do anything here. But those which don't contain the
1599 default def (representing a temporary based on the parm/result)
1600 we need to allocate space just like for normal VAR_DECLs. */
1601 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1603 expand_one_var (var, true, true);
1604 gcc_assert (SA.partition_to_pseudo[i]);
1608 pointer_map_destroy (ssa_name_decls);
1610 if (flag_stack_protect == SPCT_FLAG_STRONG)
1611 FOR_EACH_LOCAL_DECL (cfun, i, var)
1612 if (!is_global_var (var))
1614 tree var_type = TREE_TYPE (var);
1615 /* Examine local referenced variables that have their addresses taken,
1616 contain an array, or are arrays. */
1617 if (TREE_CODE (var) == VAR_DECL
1618 && (TREE_CODE (var_type) == ARRAY_TYPE
1619 || TREE_ADDRESSABLE (var)
1620 || (RECORD_OR_UNION_TYPE_P (var_type)
1621 && record_or_union_type_has_array_p (var_type))))
1623 gen_stack_protect_signal = true;
1624 break;
1628 /* At this point all variables on the local_decls with TREE_USED
1629 set are not associated with any block scope. Lay them out. */
1631 len = vec_safe_length (cfun->local_decls);
1632 FOR_EACH_LOCAL_DECL (cfun, i, var)
1634 bool expand_now = false;
1636 /* Expanded above already. */
1637 if (is_gimple_reg (var))
1639 TREE_USED (var) = 0;
1640 goto next;
1642 /* We didn't set a block for static or extern because it's hard
1643 to tell the difference between a global variable (re)declared
1644 in a local scope, and one that's really declared there to
1645 begin with. And it doesn't really matter much, since we're
1646 not giving them stack space. Expand them now. */
1647 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1648 expand_now = true;
1650 /* If the variable is not associated with any block, then it
1651 was created by the optimizers, and could be live anywhere
1652 in the function. */
1653 else if (TREE_USED (var))
1654 expand_now = true;
1656 /* Finally, mark all variables on the list as used. We'll use
1657 this in a moment when we expand those associated with scopes. */
1658 TREE_USED (var) = 1;
1660 if (expand_now)
1661 expand_one_var (var, true, true);
1663 next:
1664 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1666 rtx rtl = DECL_RTL_IF_SET (var);
1668 /* Keep artificial non-ignored vars in cfun->local_decls
1669 chain until instantiate_decls. */
1670 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1671 add_local_decl (cfun, var);
1672 else if (rtl == NULL_RTX)
1673 /* If rtl isn't set yet, which can happen e.g. with
1674 -fstack-protector, retry before returning from this
1675 function. */
1676 maybe_local_decls.safe_push (var);
1680 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1682 +-----------------+-----------------+
1683 | ...processed... | ...duplicates...|
1684 +-----------------+-----------------+
1686 +-- LEN points here.
1688 We just want the duplicates, as those are the artificial
1689 non-ignored vars that we want to keep until instantiate_decls.
1690 Move them down and truncate the array. */
1691 if (!vec_safe_is_empty (cfun->local_decls))
1692 cfun->local_decls->block_remove (0, len);
1694 /* At this point, all variables within the block tree with TREE_USED
1695 set are actually used by the optimized function. Lay them out. */
1696 expand_used_vars_for_block (outer_block, true);
1698 if (stack_vars_num > 0)
1700 add_scope_conflicts ();
1702 /* If stack protection is enabled, we don't share space between
1703 vulnerable data and non-vulnerable data. */
1704 if (flag_stack_protect)
1705 add_stack_protection_conflicts ();
1707 /* Now that we have collected all stack variables, and have computed a
1708 minimal interference graph, attempt to save some stack space. */
1709 partition_stack_vars ();
1710 if (dump_file)
1711 dump_stack_var_partition ();
1714 switch (flag_stack_protect)
1716 case SPCT_FLAG_ALL:
1717 create_stack_guard ();
1718 break;
1720 case SPCT_FLAG_STRONG:
1721 if (gen_stack_protect_signal
1722 || cfun->calls_alloca || has_protected_decls)
1723 create_stack_guard ();
1724 break;
1726 case SPCT_FLAG_DEFAULT:
1727 if (cfun->calls_alloca || has_protected_decls)
1728 create_stack_guard ();
1729 break;
1731 default:
1735 /* Assign rtl to each variable based on these partitions. */
1736 if (stack_vars_num > 0)
1738 struct stack_vars_data data;
1740 data.asan_vec = vNULL;
1741 data.asan_decl_vec = vNULL;
1743 /* Reorder decls to be protected by iterating over the variables
1744 array multiple times, and allocating out of each phase in turn. */
1745 /* ??? We could probably integrate this into the qsort we did
1746 earlier, such that we naturally see these variables first,
1747 and thus naturally allocate things in the right order. */
1748 if (has_protected_decls)
1750 /* Phase 1 contains only character arrays. */
1751 expand_stack_vars (stack_protect_decl_phase_1, &data);
1753 /* Phase 2 contains other kinds of arrays. */
1754 if (flag_stack_protect == 2)
1755 expand_stack_vars (stack_protect_decl_phase_2, &data);
1758 if (flag_sanitize & SANITIZE_ADDRESS)
1759 /* Phase 3, any partitions that need asan protection
1760 in addition to phase 1 and 2. */
1761 expand_stack_vars (asan_decl_phase_3, &data);
1763 if (!data.asan_vec.is_empty ())
1765 HOST_WIDE_INT prev_offset = frame_offset;
1766 HOST_WIDE_INT offset
1767 = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
1768 ASAN_RED_ZONE_SIZE);
1769 data.asan_vec.safe_push (prev_offset);
1770 data.asan_vec.safe_push (offset);
1772 var_end_seq
1773 = asan_emit_stack_protection (virtual_stack_vars_rtx,
1774 data.asan_vec.address (),
1775 data.asan_decl_vec.address (),
1776 data.asan_vec.length ());
1779 expand_stack_vars (NULL, &data);
1781 data.asan_vec.release ();
1782 data.asan_decl_vec.release ();
1785 fini_vars_expansion ();
1787 /* If there were any artificial non-ignored vars without rtl
1788 found earlier, see if deferred stack allocation hasn't assigned
1789 rtl to them. */
1790 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
1792 rtx rtl = DECL_RTL_IF_SET (var);
1794 /* Keep artificial non-ignored vars in cfun->local_decls
1795 chain until instantiate_decls. */
1796 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1797 add_local_decl (cfun, var);
1799 maybe_local_decls.release ();
1801 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1802 if (STACK_ALIGNMENT_NEEDED)
1804 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1805 if (!FRAME_GROWS_DOWNWARD)
1806 frame_offset += align - 1;
1807 frame_offset &= -align;
1810 return var_end_seq;
1814 /* If we need to produce a detailed dump, print the tree representation
1815 for STMT to the dump file. SINCE is the last RTX after which the RTL
1816 generated for STMT should have been appended. */
1818 static void
1819 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1821 if (dump_file && (dump_flags & TDF_DETAILS))
1823 fprintf (dump_file, "\n;; ");
1824 print_gimple_stmt (dump_file, stmt, 0,
1825 TDF_SLIM | (dump_flags & TDF_LINENO));
1826 fprintf (dump_file, "\n");
1828 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1832 /* Maps the blocks that do not contain tree labels to rtx labels. */
1834 static struct pointer_map_t *lab_rtx_for_bb;
1836 /* Returns the label_rtx expression for a label starting basic block BB. */
1838 static rtx
1839 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1841 gimple_stmt_iterator gsi;
1842 tree lab;
1843 gimple lab_stmt;
1844 void **elt;
1846 if (bb->flags & BB_RTL)
1847 return block_label (bb);
1849 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1850 if (elt)
1851 return (rtx) *elt;
1853 /* Find the tree label if it is present. */
1855 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1857 lab_stmt = gsi_stmt (gsi);
1858 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1859 break;
1861 lab = gimple_label_label (lab_stmt);
1862 if (DECL_NONLOCAL (lab))
1863 break;
1865 return label_rtx (lab);
1868 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1869 *elt = gen_label_rtx ();
1870 return (rtx) *elt;
1874 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1875 of a basic block where we just expanded the conditional at the end,
1876 possibly clean up the CFG and instruction sequence. LAST is the
1877 last instruction before the just emitted jump sequence. */
1879 static void
1880 maybe_cleanup_end_of_block (edge e, rtx last)
1882 /* Special case: when jumpif decides that the condition is
1883 trivial it emits an unconditional jump (and the necessary
1884 barrier). But we still have two edges, the fallthru one is
1885 wrong. purge_dead_edges would clean this up later. Unfortunately
1886 we have to insert insns (and split edges) before
1887 find_many_sub_basic_blocks and hence before purge_dead_edges.
1888 But splitting edges might create new blocks which depend on the
1889 fact that if there are two edges there's no barrier. So the
1890 barrier would get lost and verify_flow_info would ICE. Instead
1891 of auditing all edge splitters to care for the barrier (which
1892 normally isn't there in a cleaned CFG), fix it here. */
1893 if (BARRIER_P (get_last_insn ()))
1895 rtx insn;
1896 remove_edge (e);
1897 /* Now, we have a single successor block, if we have insns to
1898 insert on the remaining edge we potentially will insert
1899 it at the end of this block (if the dest block isn't feasible)
1900 in order to avoid splitting the edge. This insertion will take
1901 place in front of the last jump. But we might have emitted
1902 multiple jumps (conditional and one unconditional) to the
1903 same destination. Inserting in front of the last one then
1904 is a problem. See PR 40021. We fix this by deleting all
1905 jumps except the last unconditional one. */
1906 insn = PREV_INSN (get_last_insn ());
1907 /* Make sure we have an unconditional jump. Otherwise we're
1908 confused. */
1909 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1910 for (insn = PREV_INSN (insn); insn != last;)
1912 insn = PREV_INSN (insn);
1913 if (JUMP_P (NEXT_INSN (insn)))
1915 if (!any_condjump_p (NEXT_INSN (insn)))
1917 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1918 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1920 delete_insn (NEXT_INSN (insn));
1926 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1927 Returns a new basic block if we've terminated the current basic
1928 block and created a new one. */
1930 static basic_block
1931 expand_gimple_cond (basic_block bb, gimple stmt)
1933 basic_block new_bb, dest;
1934 edge new_edge;
1935 edge true_edge;
1936 edge false_edge;
1937 rtx last2, last;
1938 enum tree_code code;
1939 tree op0, op1;
1941 code = gimple_cond_code (stmt);
1942 op0 = gimple_cond_lhs (stmt);
1943 op1 = gimple_cond_rhs (stmt);
1944 /* We're sometimes presented with such code:
1945 D.123_1 = x < y;
1946 if (D.123_1 != 0)
1948 This would expand to two comparisons, which might later be
1949 cleaned up by combine. But some pattern matchers like if-conversion
1950 work better when there's only one compare, so make up for this
1951 here as a special exception if TER would have made the same change. */
1952 if (SA.values
1953 && TREE_CODE (op0) == SSA_NAME
1954 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
1955 && TREE_CODE (op1) == INTEGER_CST
1956 && ((gimple_cond_code (stmt) == NE_EXPR
1957 && integer_zerop (op1))
1958 || (gimple_cond_code (stmt) == EQ_EXPR
1959 && integer_onep (op1)))
1960 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1962 gimple second = SSA_NAME_DEF_STMT (op0);
1963 if (gimple_code (second) == GIMPLE_ASSIGN)
1965 enum tree_code code2 = gimple_assign_rhs_code (second);
1966 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1968 code = code2;
1969 op0 = gimple_assign_rhs1 (second);
1970 op1 = gimple_assign_rhs2 (second);
1972 /* If jumps are cheap, turn some more codes into
1973 jumpy sequences. */
1974 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1976 if ((code2 == BIT_AND_EXPR
1977 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1978 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1979 || code2 == TRUTH_AND_EXPR)
1981 code = TRUTH_ANDIF_EXPR;
1982 op0 = gimple_assign_rhs1 (second);
1983 op1 = gimple_assign_rhs2 (second);
1985 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1987 code = TRUTH_ORIF_EXPR;
1988 op0 = gimple_assign_rhs1 (second);
1989 op1 = gimple_assign_rhs2 (second);
1995 last2 = last = get_last_insn ();
1997 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1998 set_curr_insn_location (gimple_location (stmt));
2000 /* These flags have no purpose in RTL land. */
2001 true_edge->flags &= ~EDGE_TRUE_VALUE;
2002 false_edge->flags &= ~EDGE_FALSE_VALUE;
2004 /* We can either have a pure conditional jump with one fallthru edge or
2005 a two-way jump that needs to be decomposed into two basic blocks. */
2006 if (false_edge->dest == bb->next_bb)
2008 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2009 true_edge->probability);
2010 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2011 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2012 set_curr_insn_location (true_edge->goto_locus);
2013 false_edge->flags |= EDGE_FALLTHRU;
2014 maybe_cleanup_end_of_block (false_edge, last);
2015 return NULL;
2017 if (true_edge->dest == bb->next_bb)
2019 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2020 false_edge->probability);
2021 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2022 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2023 set_curr_insn_location (false_edge->goto_locus);
2024 true_edge->flags |= EDGE_FALLTHRU;
2025 maybe_cleanup_end_of_block (true_edge, last);
2026 return NULL;
2029 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2030 true_edge->probability);
2031 last = get_last_insn ();
2032 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2033 set_curr_insn_location (false_edge->goto_locus);
2034 emit_jump (label_rtx_for_bb (false_edge->dest));
2036 BB_END (bb) = last;
2037 if (BARRIER_P (BB_END (bb)))
2038 BB_END (bb) = PREV_INSN (BB_END (bb));
2039 update_bb_for_insn (bb);
2041 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2042 dest = false_edge->dest;
2043 redirect_edge_succ (false_edge, new_bb);
2044 false_edge->flags |= EDGE_FALLTHRU;
2045 new_bb->count = false_edge->count;
2046 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2047 if (current_loops && bb->loop_father)
2048 add_bb_to_loop (new_bb, bb->loop_father);
2049 new_edge = make_edge (new_bb, dest, 0);
2050 new_edge->probability = REG_BR_PROB_BASE;
2051 new_edge->count = new_bb->count;
2052 if (BARRIER_P (BB_END (new_bb)))
2053 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2054 update_bb_for_insn (new_bb);
2056 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2058 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2060 set_curr_insn_location (true_edge->goto_locus);
2061 true_edge->goto_locus = curr_insn_location ();
2064 return new_bb;
2067 /* Mark all calls that can have a transaction restart. */
2069 static void
2070 mark_transaction_restart_calls (gimple stmt)
2072 struct tm_restart_node dummy;
2073 void **slot;
2075 if (!cfun->gimple_df->tm_restart)
2076 return;
2078 dummy.stmt = stmt;
2079 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2080 if (slot)
2082 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2083 tree list = n->label_or_list;
2084 rtx insn;
2086 for (insn = next_real_insn (get_last_insn ());
2087 !CALL_P (insn);
2088 insn = next_real_insn (insn))
2089 continue;
2091 if (TREE_CODE (list) == LABEL_DECL)
2092 add_reg_note (insn, REG_TM, label_rtx (list));
2093 else
2094 for (; list ; list = TREE_CHAIN (list))
2095 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2099 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2100 statement STMT. */
2102 static void
2103 expand_call_stmt (gimple stmt)
2105 tree exp, decl, lhs;
2106 bool builtin_p;
2107 size_t i;
2109 if (gimple_call_internal_p (stmt))
2111 expand_internal_call (stmt);
2112 return;
2115 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2117 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2118 decl = gimple_call_fndecl (stmt);
2119 builtin_p = decl && DECL_BUILT_IN (decl);
2121 /* If this is not a builtin function, the function type through which the
2122 call is made may be different from the type of the function. */
2123 if (!builtin_p)
2124 CALL_EXPR_FN (exp)
2125 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2126 CALL_EXPR_FN (exp));
2128 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2129 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2131 for (i = 0; i < gimple_call_num_args (stmt); i++)
2133 tree arg = gimple_call_arg (stmt, i);
2134 gimple def;
2135 /* TER addresses into arguments of builtin functions so we have a
2136 chance to infer more correct alignment information. See PR39954. */
2137 if (builtin_p
2138 && TREE_CODE (arg) == SSA_NAME
2139 && (def = get_gimple_for_ssa_name (arg))
2140 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2141 arg = gimple_assign_rhs1 (def);
2142 CALL_EXPR_ARG (exp, i) = arg;
2145 if (gimple_has_side_effects (stmt))
2146 TREE_SIDE_EFFECTS (exp) = 1;
2148 if (gimple_call_nothrow_p (stmt))
2149 TREE_NOTHROW (exp) = 1;
2151 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2152 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2153 if (decl
2154 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2155 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2156 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2157 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2158 else
2159 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2160 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2161 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2163 /* Ensure RTL is created for debug args. */
2164 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2166 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2167 unsigned int ix;
2168 tree dtemp;
2170 if (debug_args)
2171 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2173 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2174 expand_debug_expr (dtemp);
2178 lhs = gimple_call_lhs (stmt);
2179 if (lhs)
2180 expand_assignment (lhs, exp, false);
2181 else
2182 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2184 mark_transaction_restart_calls (stmt);
2187 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2188 STMT that doesn't require special handling for outgoing edges. That
2189 is no tailcalls and no GIMPLE_COND. */
2191 static void
2192 expand_gimple_stmt_1 (gimple stmt)
2194 tree op0;
2196 set_curr_insn_location (gimple_location (stmt));
2198 switch (gimple_code (stmt))
2200 case GIMPLE_GOTO:
2201 op0 = gimple_goto_dest (stmt);
2202 if (TREE_CODE (op0) == LABEL_DECL)
2203 expand_goto (op0);
2204 else
2205 expand_computed_goto (op0);
2206 break;
2207 case GIMPLE_LABEL:
2208 expand_label (gimple_label_label (stmt));
2209 break;
2210 case GIMPLE_NOP:
2211 case GIMPLE_PREDICT:
2212 break;
2213 case GIMPLE_SWITCH:
2214 expand_case (stmt);
2215 break;
2216 case GIMPLE_ASM:
2217 expand_asm_stmt (stmt);
2218 break;
2219 case GIMPLE_CALL:
2220 expand_call_stmt (stmt);
2221 break;
2223 case GIMPLE_RETURN:
2224 op0 = gimple_return_retval (stmt);
2226 if (op0 && op0 != error_mark_node)
2228 tree result = DECL_RESULT (current_function_decl);
2230 /* If we are not returning the current function's RESULT_DECL,
2231 build an assignment to it. */
2232 if (op0 != result)
2234 /* I believe that a function's RESULT_DECL is unique. */
2235 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2237 /* ??? We'd like to simply use expand_assignment here,
2238 but this fails if the value is of BLKmode but the return
2239 decl is a register. expand_return has special handling
2240 for this combination, which eventually should move
2241 to common code. See comments there. Until then, let's
2242 build a modify expression :-/ */
2243 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2244 result, op0);
2247 if (!op0)
2248 expand_null_return ();
2249 else
2250 expand_return (op0);
2251 break;
2253 case GIMPLE_ASSIGN:
2255 tree lhs = gimple_assign_lhs (stmt);
2257 /* Tree expand used to fiddle with |= and &= of two bitfield
2258 COMPONENT_REFs here. This can't happen with gimple, the LHS
2259 of binary assigns must be a gimple reg. */
2261 if (TREE_CODE (lhs) != SSA_NAME
2262 || get_gimple_rhs_class (gimple_expr_code (stmt))
2263 == GIMPLE_SINGLE_RHS)
2265 tree rhs = gimple_assign_rhs1 (stmt);
2266 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2267 == GIMPLE_SINGLE_RHS);
2268 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2269 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2270 if (TREE_CLOBBER_P (rhs))
2271 /* This is a clobber to mark the going out of scope for
2272 this LHS. */
2274 else
2275 expand_assignment (lhs, rhs,
2276 gimple_assign_nontemporal_move_p (stmt));
2278 else
2280 rtx target, temp;
2281 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2282 struct separate_ops ops;
2283 bool promoted = false;
2285 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2286 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2287 promoted = true;
2289 ops.code = gimple_assign_rhs_code (stmt);
2290 ops.type = TREE_TYPE (lhs);
2291 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2293 case GIMPLE_TERNARY_RHS:
2294 ops.op2 = gimple_assign_rhs3 (stmt);
2295 /* Fallthru */
2296 case GIMPLE_BINARY_RHS:
2297 ops.op1 = gimple_assign_rhs2 (stmt);
2298 /* Fallthru */
2299 case GIMPLE_UNARY_RHS:
2300 ops.op0 = gimple_assign_rhs1 (stmt);
2301 break;
2302 default:
2303 gcc_unreachable ();
2305 ops.location = gimple_location (stmt);
2307 /* If we want to use a nontemporal store, force the value into
2308 a register first. If we store into a promoted register,
2309 don't directly expand to target. */
2310 temp = nontemporal || promoted ? NULL_RTX : target;
2311 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2312 EXPAND_NORMAL);
2314 if (temp == target)
2316 else if (promoted)
2318 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2319 /* If TEMP is a VOIDmode constant, use convert_modes to make
2320 sure that we properly convert it. */
2321 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2323 temp = convert_modes (GET_MODE (target),
2324 TYPE_MODE (ops.type),
2325 temp, unsignedp);
2326 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2327 GET_MODE (target), temp, unsignedp);
2330 convert_move (SUBREG_REG (target), temp, unsignedp);
2332 else if (nontemporal && emit_storent_insn (target, temp))
2334 else
2336 temp = force_operand (temp, target);
2337 if (temp != target)
2338 emit_move_insn (target, temp);
2342 break;
2344 default:
2345 gcc_unreachable ();
2349 /* Expand one gimple statement STMT and return the last RTL instruction
2350 before any of the newly generated ones.
2352 In addition to generating the necessary RTL instructions this also
2353 sets REG_EH_REGION notes if necessary and sets the current source
2354 location for diagnostics. */
2356 static rtx
2357 expand_gimple_stmt (gimple stmt)
2359 location_t saved_location = input_location;
2360 rtx last = get_last_insn ();
2361 int lp_nr;
2363 gcc_assert (cfun);
2365 /* We need to save and restore the current source location so that errors
2366 discovered during expansion are emitted with the right location. But
2367 it would be better if the diagnostic routines used the source location
2368 embedded in the tree nodes rather than globals. */
2369 if (gimple_has_location (stmt))
2370 input_location = gimple_location (stmt);
2372 expand_gimple_stmt_1 (stmt);
2374 /* Free any temporaries used to evaluate this statement. */
2375 free_temp_slots ();
2377 input_location = saved_location;
2379 /* Mark all insns that may trap. */
2380 lp_nr = lookup_stmt_eh_lp (stmt);
2381 if (lp_nr)
2383 rtx insn;
2384 for (insn = next_real_insn (last); insn;
2385 insn = next_real_insn (insn))
2387 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2388 /* If we want exceptions for non-call insns, any
2389 may_trap_p instruction may throw. */
2390 && GET_CODE (PATTERN (insn)) != CLOBBER
2391 && GET_CODE (PATTERN (insn)) != USE
2392 && insn_could_throw_p (insn))
2393 make_reg_eh_region_note (insn, 0, lp_nr);
2397 return last;
2400 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2401 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2402 generated a tail call (something that might be denied by the ABI
2403 rules governing the call; see calls.c).
2405 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2406 can still reach the rest of BB. The case here is __builtin_sqrt,
2407 where the NaN result goes through the external function (with a
2408 tailcall) and the normal result happens via a sqrt instruction. */
2410 static basic_block
2411 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2413 rtx last2, last;
2414 edge e;
2415 edge_iterator ei;
2416 int probability;
2417 gcov_type count;
2419 last2 = last = expand_gimple_stmt (stmt);
2421 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2422 if (CALL_P (last) && SIBLING_CALL_P (last))
2423 goto found;
2425 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2427 *can_fallthru = true;
2428 return NULL;
2430 found:
2431 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2432 Any instructions emitted here are about to be deleted. */
2433 do_pending_stack_adjust ();
2435 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2436 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2437 EH or abnormal edges, we shouldn't have created a tail call in
2438 the first place. So it seems to me we should just be removing
2439 all edges here, or redirecting the existing fallthru edge to
2440 the exit block. */
2442 probability = 0;
2443 count = 0;
2445 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2447 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2449 if (e->dest != EXIT_BLOCK_PTR)
2451 e->dest->count -= e->count;
2452 e->dest->frequency -= EDGE_FREQUENCY (e);
2453 if (e->dest->count < 0)
2454 e->dest->count = 0;
2455 if (e->dest->frequency < 0)
2456 e->dest->frequency = 0;
2458 count += e->count;
2459 probability += e->probability;
2460 remove_edge (e);
2462 else
2463 ei_next (&ei);
2466 /* This is somewhat ugly: the call_expr expander often emits instructions
2467 after the sibcall (to perform the function return). These confuse the
2468 find_many_sub_basic_blocks code, so we need to get rid of them. */
2469 last = NEXT_INSN (last);
2470 gcc_assert (BARRIER_P (last));
2472 *can_fallthru = false;
2473 while (NEXT_INSN (last))
2475 /* For instance, a sqrt builtin expander expands an if with a
2476 sibcall in the then-branch and a label for the else-branch. */
2477 if (LABEL_P (NEXT_INSN (last)))
2479 *can_fallthru = true;
2480 break;
2482 delete_insn (NEXT_INSN (last));
2485 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2486 e->probability += probability;
2487 e->count += count;
2488 BB_END (bb) = last;
2489 update_bb_for_insn (bb);
2491 if (NEXT_INSN (last))
2493 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2495 last = BB_END (bb);
2496 if (BARRIER_P (last))
2497 BB_END (bb) = PREV_INSN (last);
2500 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2502 return bb;
2505 /* Return the difference between the floor and the truncated result of
2506 a signed division by OP1 with remainder MOD. */
2507 static rtx
2508 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2510 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2511 return gen_rtx_IF_THEN_ELSE
2512 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2513 gen_rtx_IF_THEN_ELSE
2514 (mode, gen_rtx_LT (BImode,
2515 gen_rtx_DIV (mode, op1, mod),
2516 const0_rtx),
2517 constm1_rtx, const0_rtx),
2518 const0_rtx);
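/* For example, for -7 / 2 truncating division gives quotient -3 with
   MOD == -1; OP1 / MOD == 2 / -1 == -2 < 0, so the adjustment is -1 and
   -3 + -1 == -4 == floor (-7 / 2).  When MOD is zero or has the same
   sign as OP1 (e.g. 7 / 2), the adjustment is 0.  */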
2521 /* Return the difference between the ceil and the truncated result of
2522 a signed division by OP1 with remainder MOD. */
2523 static rtx
2524 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2526 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2527 return gen_rtx_IF_THEN_ELSE
2528 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2529 gen_rtx_IF_THEN_ELSE
2530 (mode, gen_rtx_GT (BImode,
2531 gen_rtx_DIV (mode, op1, mod),
2532 const0_rtx),
2533 const1_rtx, const0_rtx),
2534 const0_rtx);
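/* For example, for 7 / 2 truncating division gives quotient 3 with
   MOD == 1; OP1 / MOD == 2 > 0, so the adjustment is +1 and
   3 + 1 == 4 == ceil (7 / 2).  For -7 / 2, MOD == -1 and
   OP1 / MOD == -2 < 0, so the adjustment is 0 and the truncated
   quotient -3 already equals the ceiling.  */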
2537 /* Return the difference between the ceil and the truncated result of
2538 an unsigned division by OP1 with remainder MOD. */
2539 static rtx
2540 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2542 /* (mod != 0 ? 1 : 0) */
2543 return gen_rtx_IF_THEN_ELSE
2544 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2545 const1_rtx, const0_rtx);
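/* For example, for unsigned 7 / 2 the remainder is 1, so the adjustment
   is +1 and 3 + 1 == 4 == ceil (7 / 2); for an exact division the
   remainder is 0 and the adjustment is 0.  */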
2548 /* Return the difference between the rounded and the truncated result
2549 of a signed division by OP1 with remainder MOD. Halfway cases are
2550 rounded away from zero, rather than to the nearest even number. */
2551 static rtx
2552 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2554 /* (abs (mod) >= abs (op1) - abs (mod)
2555 ? (op1 / mod > 0 ? 1 : -1)
2556 : 0) */
2557 return gen_rtx_IF_THEN_ELSE
2558 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2559 gen_rtx_MINUS (mode,
2560 gen_rtx_ABS (mode, op1),
2561 gen_rtx_ABS (mode, mod))),
2562 gen_rtx_IF_THEN_ELSE
2563 (mode, gen_rtx_GT (BImode,
2564 gen_rtx_DIV (mode, op1, mod),
2565 const0_rtx),
2566 const1_rtx, constm1_rtx),
2567 const0_rtx);
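/* For example, for 7 / 2 we have abs (MOD) == 1 and
   abs (OP1) - abs (MOD) == 1, so the halfway test holds, and
   OP1 / MOD == 2 > 0 gives an adjustment of +1: 3 + 1 == 4.  For
   -7 / 2 the same test holds but OP1 / MOD == -2 < 0, giving
   -3 + -1 == -4, i.e. -3.5 rounded away from zero.  For 7 / 3
   (MOD == 1 < 3 - 1) the adjustment is 0 and the result stays 2.  */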
2570 /* Return the difference between the rounded and the truncated result
2571 of an unsigned division by OP1 with remainder MOD. Halfway cases
2572 are rounded away from zero, rather than to the nearest even
2573 number. */
2574 static rtx
2575 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2577 /* (mod >= op1 - mod ? 1 : 0) */
2578 return gen_rtx_IF_THEN_ELSE
2579 (mode, gen_rtx_GE (BImode, mod,
2580 gen_rtx_MINUS (mode, op1, mod)),
2581 const1_rtx, const0_rtx);
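/* For example, for unsigned 7 / 2, MOD == 1 and OP1 - MOD == 1, so
   MOD >= OP1 - MOD holds and the adjustment is +1: 3 + 1 == 4.  For
   9 / 4, MOD == 1 < OP1 - MOD == 3, so the adjustment is 0 and the
   result stays 2.  */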
2584 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
2585 any rtl. */
2587 static rtx
2588 convert_debug_memory_address (enum machine_mode mode, rtx x,
2589 addr_space_t as)
2591 enum machine_mode xmode = GET_MODE (x);
2593 #ifndef POINTERS_EXTEND_UNSIGNED
2594 gcc_assert (mode == Pmode
2595 || mode == targetm.addr_space.address_mode (as));
2596 gcc_assert (xmode == mode || xmode == VOIDmode);
2597 #else
2598 rtx temp;
2600 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2602 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2603 return x;
2605 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2606 x = simplify_gen_subreg (mode, x, xmode,
2607 subreg_lowpart_offset
2608 (mode, xmode));
2609 else if (POINTERS_EXTEND_UNSIGNED > 0)
2610 x = gen_rtx_ZERO_EXTEND (mode, x);
2611 else if (!POINTERS_EXTEND_UNSIGNED)
2612 x = gen_rtx_SIGN_EXTEND (mode, x);
2613 else
2615 switch (GET_CODE (x))
2617 case SUBREG:
2618 if ((SUBREG_PROMOTED_VAR_P (x)
2619 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2620 || (GET_CODE (SUBREG_REG (x)) == PLUS
2621 && REG_P (XEXP (SUBREG_REG (x), 0))
2622 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2623 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2624 && GET_MODE (SUBREG_REG (x)) == mode)
2625 return SUBREG_REG (x);
2626 break;
2627 case LABEL_REF:
2628 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2629 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2630 return temp;
2631 case SYMBOL_REF:
2632 temp = shallow_copy_rtx (x);
2633 PUT_MODE (temp, mode);
2634 return temp;
2635 case CONST:
2636 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2637 if (temp)
2638 temp = gen_rtx_CONST (mode, temp);
2639 return temp;
2640 case PLUS:
2641 case MINUS:
2642 if (CONST_INT_P (XEXP (x, 1)))
2644 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2645 if (temp)
2646 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2648 break;
2649 default:
2650 break;
2652 /* Don't know how to express ptr_extend as operation in debug info. */
2653 return NULL;
2655 #endif /* POINTERS_EXTEND_UNSIGNED */
2657 return x;
2660 /* Return an RTX equivalent to the value of the parameter DECL. */
2662 static rtx
2663 expand_debug_parm_decl (tree decl)
2665 rtx incoming = DECL_INCOMING_RTL (decl);
2667 if (incoming
2668 && GET_MODE (incoming) != BLKmode
2669 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2670 || (MEM_P (incoming)
2671 && REG_P (XEXP (incoming, 0))
2672 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2674 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2676 #ifdef HAVE_window_save
2677 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2678 If the target machine has an explicit window save instruction, the
2679 actual entry value is the corresponding OUTGOING_REGNO instead. */
2680 if (REG_P (incoming)
2681 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2682 incoming
2683 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2684 OUTGOING_REGNO (REGNO (incoming)), 0);
2685 else if (MEM_P (incoming))
2687 rtx reg = XEXP (incoming, 0);
2688 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2690 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2691 incoming = replace_equiv_address_nv (incoming, reg);
2693 else
2694 incoming = copy_rtx (incoming);
2696 #endif
2698 ENTRY_VALUE_EXP (rtl) = incoming;
2699 return rtl;
2702 if (incoming
2703 && GET_MODE (incoming) != BLKmode
2704 && !TREE_ADDRESSABLE (decl)
2705 && MEM_P (incoming)
2706 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2707 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2708 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2709 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2710 return copy_rtx (incoming);
2712 return NULL_RTX;
2715 /* Return an RTX equivalent to the value of the tree expression EXP. */
2717 static rtx
2718 expand_debug_expr (tree exp)
2720 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2721 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2722 enum machine_mode inner_mode = VOIDmode;
2723 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2724 addr_space_t as;
2726 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2728 case tcc_expression:
2729 switch (TREE_CODE (exp))
2731 case COND_EXPR:
2732 case DOT_PROD_EXPR:
2733 case WIDEN_MULT_PLUS_EXPR:
2734 case WIDEN_MULT_MINUS_EXPR:
2735 case FMA_EXPR:
2736 goto ternary;
2738 case TRUTH_ANDIF_EXPR:
2739 case TRUTH_ORIF_EXPR:
2740 case TRUTH_AND_EXPR:
2741 case TRUTH_OR_EXPR:
2742 case TRUTH_XOR_EXPR:
2743 goto binary;
2745 case TRUTH_NOT_EXPR:
2746 goto unary;
2748 default:
2749 break;
2751 break;
2753 ternary:
2754 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2755 if (!op2)
2756 return NULL_RTX;
2757 /* Fall through. */
2759 binary:
2760 case tcc_binary:
2761 case tcc_comparison:
2762 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2763 if (!op1)
2764 return NULL_RTX;
2765 /* Fall through. */
2767 unary:
2768 case tcc_unary:
2769 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2770 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2771 if (!op0)
2772 return NULL_RTX;
2773 break;
2775 case tcc_type:
2776 case tcc_statement:
2777 gcc_unreachable ();
2779 case tcc_constant:
2780 case tcc_exceptional:
2781 case tcc_declaration:
2782 case tcc_reference:
2783 case tcc_vl_exp:
2784 break;
2787 switch (TREE_CODE (exp))
2789 case STRING_CST:
2790 if (!lookup_constant_def (exp))
2792 if (strlen (TREE_STRING_POINTER (exp)) + 1
2793 != (size_t) TREE_STRING_LENGTH (exp))
2794 return NULL_RTX;
2795 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2796 op0 = gen_rtx_MEM (BLKmode, op0);
2797 set_mem_attributes (op0, exp, 0);
2798 return op0;
2800 /* Fall through... */
2802 case INTEGER_CST:
2803 case REAL_CST:
2804 case FIXED_CST:
2805 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2806 return op0;
2808 case COMPLEX_CST:
2809 gcc_assert (COMPLEX_MODE_P (mode));
2810 op0 = expand_debug_expr (TREE_REALPART (exp));
2811 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2812 return gen_rtx_CONCAT (mode, op0, op1);
2814 case DEBUG_EXPR_DECL:
2815 op0 = DECL_RTL_IF_SET (exp);
2817 if (op0)
2818 return op0;
2820 op0 = gen_rtx_DEBUG_EXPR (mode);
2821 DEBUG_EXPR_TREE_DECL (op0) = exp;
2822 SET_DECL_RTL (exp, op0);
2824 return op0;
2826 case VAR_DECL:
2827 case PARM_DECL:
2828 case FUNCTION_DECL:
2829 case LABEL_DECL:
2830 case CONST_DECL:
2831 case RESULT_DECL:
2832 op0 = DECL_RTL_IF_SET (exp);
2834 /* This decl was probably optimized away. */
2835 if (!op0)
2837 if (TREE_CODE (exp) != VAR_DECL
2838 || DECL_EXTERNAL (exp)
2839 || !TREE_STATIC (exp)
2840 || !DECL_NAME (exp)
2841 || DECL_HARD_REGISTER (exp)
2842 || DECL_IN_CONSTANT_POOL (exp)
2843 || mode == VOIDmode)
2844 return NULL;
2846 op0 = make_decl_rtl_for_debug (exp);
2847 if (!MEM_P (op0)
2848 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2849 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2850 return NULL;
2852 else
2853 op0 = copy_rtx (op0);
2855 if (GET_MODE (op0) == BLKmode
2856 /* If op0 is not BLKmode but mode is BLKmode, adjust_mode
2857 below would ICE. While it is likely a FE bug,
2858 try to be robust here. See PR43166. */
2859 || mode == BLKmode
2860 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2862 gcc_assert (MEM_P (op0));
2863 op0 = adjust_address_nv (op0, mode, 0);
2864 return op0;
2867 /* Fall through. */
2869 adjust_mode:
2870 case PAREN_EXPR:
2871 case NOP_EXPR:
2872 case CONVERT_EXPR:
2874 inner_mode = GET_MODE (op0);
2876 if (mode == inner_mode)
2877 return op0;
2879 if (inner_mode == VOIDmode)
2881 if (TREE_CODE (exp) == SSA_NAME)
2882 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2883 else
2884 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2885 if (mode == inner_mode)
2886 return op0;
2889 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2891 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2892 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2893 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2894 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2895 else
2896 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2898 else if (FLOAT_MODE_P (mode))
2900 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2901 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2902 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2903 else
2904 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2906 else if (FLOAT_MODE_P (inner_mode))
2908 if (unsignedp)
2909 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2910 else
2911 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2913 else if (CONSTANT_P (op0)
2914 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2915 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2916 subreg_lowpart_offset (mode,
2917 inner_mode));
2918 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2919 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2920 : unsignedp)
2921 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2922 else
2923 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2925 return op0;
2928 case MEM_REF:
2929 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2931 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2932 TREE_OPERAND (exp, 0),
2933 TREE_OPERAND (exp, 1));
2934 if (newexp)
2935 return expand_debug_expr (newexp);
2937 /* FALLTHROUGH */
2938 case INDIRECT_REF:
2939 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2940 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2941 if (!op0)
2942 return NULL;
2944 if (TREE_CODE (exp) == MEM_REF)
2946 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2947 || (GET_CODE (op0) == PLUS
2948 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2949 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2950 Instead just use get_inner_reference. */
2951 goto component_ref;
2953 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2954 if (!op1 || !CONST_INT_P (op1))
2955 return NULL;
2957 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
2960 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2961 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2962 else
2963 as = ADDR_SPACE_GENERIC;
2965 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2966 op0, as);
2967 if (op0 == NULL_RTX)
2968 return NULL;
2970 op0 = gen_rtx_MEM (mode, op0);
2971 set_mem_attributes (op0, exp, 0);
2972 if (TREE_CODE (exp) == MEM_REF
2973 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2974 set_mem_expr (op0, NULL_TREE);
2975 set_mem_addr_space (op0, as);
2977 return op0;
2979 case TARGET_MEM_REF:
2980 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2981 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2982 return NULL;
2984 op0 = expand_debug_expr
2985 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2986 if (!op0)
2987 return NULL;
2989 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2990 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2991 else
2992 as = ADDR_SPACE_GENERIC;
2994 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2995 op0, as);
2996 if (op0 == NULL_RTX)
2997 return NULL;
2999 op0 = gen_rtx_MEM (mode, op0);
3001 set_mem_attributes (op0, exp, 0);
3002 set_mem_addr_space (op0, as);
3004 return op0;
3006 component_ref:
3007 case ARRAY_REF:
3008 case ARRAY_RANGE_REF:
3009 case COMPONENT_REF:
3010 case BIT_FIELD_REF:
3011 case REALPART_EXPR:
3012 case IMAGPART_EXPR:
3013 case VIEW_CONVERT_EXPR:
3015 enum machine_mode mode1;
3016 HOST_WIDE_INT bitsize, bitpos;
3017 tree offset;
3018 int volatilep = 0;
3019 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3020 &mode1, &unsignedp, &volatilep, false);
3021 rtx orig_op0;
3023 if (bitsize == 0)
3024 return NULL;
3026 orig_op0 = op0 = expand_debug_expr (tem);
3028 if (!op0)
3029 return NULL;
3031 if (offset)
3033 enum machine_mode addrmode, offmode;
3035 if (!MEM_P (op0))
3036 return NULL;
3038 op0 = XEXP (op0, 0);
3039 addrmode = GET_MODE (op0);
3040 if (addrmode == VOIDmode)
3041 addrmode = Pmode;
3043 op1 = expand_debug_expr (offset);
3044 if (!op1)
3045 return NULL;
3047 offmode = GET_MODE (op1);
3048 if (offmode == VOIDmode)
3049 offmode = TYPE_MODE (TREE_TYPE (offset));
3051 if (addrmode != offmode)
3052 op1 = simplify_gen_subreg (addrmode, op1, offmode,
3053 subreg_lowpart_offset (addrmode,
3054 offmode));
3056 /* Don't use offset_address here; we don't need a
3057 recognizable address, and we don't want to generate
3058 code. */
3059 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
3060 op0, op1));
3063 if (MEM_P (op0))
3065 if (mode1 == VOIDmode)
3066 /* Bitfield. */
3067 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
3068 if (bitpos >= BITS_PER_UNIT)
3070 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
3071 bitpos %= BITS_PER_UNIT;
3073 else if (bitpos < 0)
3075 HOST_WIDE_INT units
3076 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
3077 op0 = adjust_address_nv (op0, mode1, units);
3078 bitpos += units * BITS_PER_UNIT;
3080 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
3081 op0 = adjust_address_nv (op0, mode, 0);
3082 else if (GET_MODE (op0) != mode1)
3083 op0 = adjust_address_nv (op0, mode1, 0);
3084 else
3085 op0 = copy_rtx (op0);
3086 if (op0 == orig_op0)
3087 op0 = shallow_copy_rtx (op0);
3088 set_mem_attributes (op0, exp, 0);
3091 if (bitpos == 0 && mode == GET_MODE (op0))
3092 return op0;
3094 if (bitpos < 0)
3095 return NULL;
3097 if (GET_MODE (op0) == BLKmode)
3098 return NULL;
3100 if ((bitpos % BITS_PER_UNIT) == 0
3101 && bitsize == GET_MODE_BITSIZE (mode1))
3103 enum machine_mode opmode = GET_MODE (op0);
3105 if (opmode == VOIDmode)
3106 opmode = TYPE_MODE (TREE_TYPE (tem));
3108 /* This condition may hold if we're expanding the address
3109 right past the end of an array that turned out not to
3110 be addressable (i.e., the address was only computed in
3111 debug stmts). The gen_subreg below would rightfully
3112 crash, and the address doesn't really exist, so just
3113 drop it. */
3114 if (bitpos >= GET_MODE_BITSIZE (opmode))
3115 return NULL;
3117 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3118 return simplify_gen_subreg (mode, op0, opmode,
3119 bitpos / BITS_PER_UNIT);
3122 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3123 && TYPE_UNSIGNED (TREE_TYPE (exp))
3124 ? SIGN_EXTRACT
3125 : ZERO_EXTRACT, mode,
3126 GET_MODE (op0) != VOIDmode
3127 ? GET_MODE (op0)
3128 : TYPE_MODE (TREE_TYPE (tem)),
3129 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3132 case ABS_EXPR:
3133 return simplify_gen_unary (ABS, mode, op0, mode);
3135 case NEGATE_EXPR:
3136 return simplify_gen_unary (NEG, mode, op0, mode);
3138 case BIT_NOT_EXPR:
3139 return simplify_gen_unary (NOT, mode, op0, mode);
3141 case FLOAT_EXPR:
3142 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3143 0)))
3144 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3145 inner_mode);
3147 case FIX_TRUNC_EXPR:
3148 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3149 inner_mode);
3151 case POINTER_PLUS_EXPR:
3152 /* For the rare target where pointers are not the same size as
3153 size_t, we need to check for mis-matched modes and correct
3154 the addend. */
3155 if (op0 && op1
3156 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3157 && GET_MODE (op0) != GET_MODE (op1))
3159 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
3160 /* If OP0 is a partial mode, then we must truncate, even if it has
3161 the same bitsize as OP1 as GCC's representation of partial modes
3162 is opaque. */
3163 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
3164 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
3165 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3166 GET_MODE (op1));
3167 else
3168 /* We always sign-extend, regardless of the signedness of
3169 the operand, because the operand is always unsigned
3170 here even if the original C expression is signed. */
3171 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3172 GET_MODE (op1));
3174 /* Fall through. */
3175 case PLUS_EXPR:
3176 return simplify_gen_binary (PLUS, mode, op0, op1);
3178 case MINUS_EXPR:
3179 return simplify_gen_binary (MINUS, mode, op0, op1);
3181 case MULT_EXPR:
3182 return simplify_gen_binary (MULT, mode, op0, op1);
3184 case RDIV_EXPR:
3185 case TRUNC_DIV_EXPR:
3186 case EXACT_DIV_EXPR:
3187 if (unsignedp)
3188 return simplify_gen_binary (UDIV, mode, op0, op1);
3189 else
3190 return simplify_gen_binary (DIV, mode, op0, op1);
3192 case TRUNC_MOD_EXPR:
3193 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3195 case FLOOR_DIV_EXPR:
3196 if (unsignedp)
3197 return simplify_gen_binary (UDIV, mode, op0, op1);
3198 else
3200 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3201 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3202 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3203 return simplify_gen_binary (PLUS, mode, div, adj);
3206 case FLOOR_MOD_EXPR:
3207 if (unsignedp)
3208 return simplify_gen_binary (UMOD, mode, op0, op1);
3209 else
3211 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3212 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3213 adj = simplify_gen_unary (NEG, mode,
3214 simplify_gen_binary (MULT, mode, adj, op1),
3215 mode);
3216 return simplify_gen_binary (PLUS, mode, mod, adj);
3219 case CEIL_DIV_EXPR:
3220 if (unsignedp)
3222 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3223 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3224 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3225 return simplify_gen_binary (PLUS, mode, div, adj);
3227 else
3229 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3230 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3231 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3232 return simplify_gen_binary (PLUS, mode, div, adj);
3235 case CEIL_MOD_EXPR:
3236 if (unsignedp)
3238 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3239 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3240 adj = simplify_gen_unary (NEG, mode,
3241 simplify_gen_binary (MULT, mode, adj, op1),
3242 mode);
3243 return simplify_gen_binary (PLUS, mode, mod, adj);
3245 else
3247 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3248 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3249 adj = simplify_gen_unary (NEG, mode,
3250 simplify_gen_binary (MULT, mode, adj, op1),
3251 mode);
3252 return simplify_gen_binary (PLUS, mode, mod, adj);
3255 case ROUND_DIV_EXPR:
3256 if (unsignedp)
3258 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3259 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3260 rtx adj = round_udiv_adjust (mode, mod, op1);
3261 return simplify_gen_binary (PLUS, mode, div, adj);
3263 else
3265 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3266 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3267 rtx adj = round_sdiv_adjust (mode, mod, op1);
3268 return simplify_gen_binary (PLUS, mode, div, adj);
3271 case ROUND_MOD_EXPR:
3272 if (unsignedp)
3274 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3275 rtx adj = round_udiv_adjust (mode, mod, op1);
3276 adj = simplify_gen_unary (NEG, mode,
3277 simplify_gen_binary (MULT, mode, adj, op1),
3278 mode);
3279 return simplify_gen_binary (PLUS, mode, mod, adj);
3281 else
3283 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3284 rtx adj = round_sdiv_adjust (mode, mod, op1);
3285 adj = simplify_gen_unary (NEG, mode,
3286 simplify_gen_binary (MULT, mode, adj, op1),
3287 mode);
3288 return simplify_gen_binary (PLUS, mode, mod, adj);
3291 case LSHIFT_EXPR:
3292 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3294 case RSHIFT_EXPR:
3295 if (unsignedp)
3296 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3297 else
3298 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3300 case LROTATE_EXPR:
3301 return simplify_gen_binary (ROTATE, mode, op0, op1);
3303 case RROTATE_EXPR:
3304 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3306 case MIN_EXPR:
3307 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3309 case MAX_EXPR:
3310 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3312 case BIT_AND_EXPR:
3313 case TRUTH_AND_EXPR:
3314 return simplify_gen_binary (AND, mode, op0, op1);
3316 case BIT_IOR_EXPR:
3317 case TRUTH_OR_EXPR:
3318 return simplify_gen_binary (IOR, mode, op0, op1);
3320 case BIT_XOR_EXPR:
3321 case TRUTH_XOR_EXPR:
3322 return simplify_gen_binary (XOR, mode, op0, op1);
3324 case TRUTH_ANDIF_EXPR:
3325 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3327 case TRUTH_ORIF_EXPR:
3328 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3330 case TRUTH_NOT_EXPR:
3331 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3333 case LT_EXPR:
3334 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3335 op0, op1);
3337 case LE_EXPR:
3338 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3339 op0, op1);
3341 case GT_EXPR:
3342 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3343 op0, op1);
3345 case GE_EXPR:
3346 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3347 op0, op1);
3349 case EQ_EXPR:
3350 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3352 case NE_EXPR:
3353 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3355 case UNORDERED_EXPR:
3356 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3358 case ORDERED_EXPR:
3359 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3361 case UNLT_EXPR:
3362 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3364 case UNLE_EXPR:
3365 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3367 case UNGT_EXPR:
3368 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3370 case UNGE_EXPR:
3371 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3373 case UNEQ_EXPR:
3374 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3376 case LTGT_EXPR:
3377 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3379 case COND_EXPR:
3380 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3382 case COMPLEX_EXPR:
3383 gcc_assert (COMPLEX_MODE_P (mode));
3384 if (GET_MODE (op0) == VOIDmode)
3385 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3386 if (GET_MODE (op1) == VOIDmode)
3387 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3388 return gen_rtx_CONCAT (mode, op0, op1);
3390 case CONJ_EXPR:
3391 if (GET_CODE (op0) == CONCAT)
3392 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3393 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3394 XEXP (op0, 1),
3395 GET_MODE_INNER (mode)));
3396 else
3398 enum machine_mode imode = GET_MODE_INNER (mode);
3399 rtx re, im;
3401 if (MEM_P (op0))
3403 re = adjust_address_nv (op0, imode, 0);
3404 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3406 else
3408 enum machine_mode ifmode = int_mode_for_mode (mode);
3409 enum machine_mode ihmode = int_mode_for_mode (imode);
3410 rtx halfsize;
3411 if (ifmode == BLKmode || ihmode == BLKmode)
3412 return NULL;
3413 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3414 re = op0;
3415 if (mode != ifmode)
3416 re = gen_rtx_SUBREG (ifmode, re, 0);
3417 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3418 if (imode != ihmode)
3419 re = gen_rtx_SUBREG (imode, re, 0);
3420 im = copy_rtx (op0);
3421 if (mode != ifmode)
3422 im = gen_rtx_SUBREG (ifmode, im, 0);
3423 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3424 if (imode != ihmode)
3425 im = gen_rtx_SUBREG (imode, im, 0);
3427 im = gen_rtx_NEG (imode, im);
3428 return gen_rtx_CONCAT (mode, re, im);
3431 case ADDR_EXPR:
3432 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3433 if (!op0 || !MEM_P (op0))
3435 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3436 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3437 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3438 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3439 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3440 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3442 if (handled_component_p (TREE_OPERAND (exp, 0)))
3444 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3445 tree decl
3446 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3447 &bitoffset, &bitsize, &maxsize);
3448 if ((TREE_CODE (decl) == VAR_DECL
3449 || TREE_CODE (decl) == PARM_DECL
3450 || TREE_CODE (decl) == RESULT_DECL)
3451 && (!TREE_ADDRESSABLE (decl)
3452 || target_for_debug_bind (decl))
3453 && (bitoffset % BITS_PER_UNIT) == 0
3454 && bitsize > 0
3455 && bitsize == maxsize)
3457 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
3458 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
3462 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
3463 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
3464 == ADDR_EXPR)
3466 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
3467 0));
3468 if (op0 != NULL
3469 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3470 || (GET_CODE (op0) == PLUS
3471 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
3472 && CONST_INT_P (XEXP (op0, 1)))))
3474 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
3475 1));
3476 if (!op1 || !CONST_INT_P (op1))
3477 return NULL;
3479 return plus_constant (mode, op0, INTVAL (op1));
3483 return NULL;
3486 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3487 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3489 return op0;
3491 case VECTOR_CST:
3493 unsigned i;
3495 op0 = gen_rtx_CONCATN
3496 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3498 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3500 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3501 if (!op1)
3502 return NULL;
3503 XVECEXP (op0, 0, i) = op1;
3506 return op0;
3509 case CONSTRUCTOR:
3510 if (TREE_CLOBBER_P (exp))
3511 return NULL;
3512 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3514 unsigned i;
3515 tree val;
3517 op0 = gen_rtx_CONCATN
3518 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3520 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3522 op1 = expand_debug_expr (val);
3523 if (!op1)
3524 return NULL;
3525 XVECEXP (op0, 0, i) = op1;
3528 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3530 op1 = expand_debug_expr
3531 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3533 if (!op1)
3534 return NULL;
3536 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3537 XVECEXP (op0, 0, i) = op1;
3540 return op0;
3542 else
3543 goto flag_unsupported;
3545 case CALL_EXPR:
3546 /* ??? Maybe handle some builtins? */
3547 return NULL;
3549 case SSA_NAME:
3551 gimple g = get_gimple_for_ssa_name (exp);
3552 if (g)
3554 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3555 if (!op0)
3556 return NULL;
3558 else
3560 int part = var_to_partition (SA.map, exp);
3562 if (part == NO_PARTITION)
3564 /* If this is a reference to an incoming value of a parameter
3565 that is never used in the code, or where the incoming
3566 value is never used in the code, use PARM_DECL's
3567 DECL_RTL if set. */
3568 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3569 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3571 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3572 if (op0)
3573 goto adjust_mode;
3574 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3575 if (op0)
3576 goto adjust_mode;
3578 return NULL;
3581 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3583 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3585 goto adjust_mode;
3588 case ERROR_MARK:
3589 return NULL;
3591 /* Vector stuff. For most of the codes we don't have rtl codes. */
3592 case REALIGN_LOAD_EXPR:
3593 case REDUC_MAX_EXPR:
3594 case REDUC_MIN_EXPR:
3595 case REDUC_PLUS_EXPR:
3596 case VEC_COND_EXPR:
3597 case VEC_LSHIFT_EXPR:
3598 case VEC_PACK_FIX_TRUNC_EXPR:
3599 case VEC_PACK_SAT_EXPR:
3600 case VEC_PACK_TRUNC_EXPR:
3601 case VEC_RSHIFT_EXPR:
3602 case VEC_UNPACK_FLOAT_HI_EXPR:
3603 case VEC_UNPACK_FLOAT_LO_EXPR:
3604 case VEC_UNPACK_HI_EXPR:
3605 case VEC_UNPACK_LO_EXPR:
3606 case VEC_WIDEN_MULT_HI_EXPR:
3607 case VEC_WIDEN_MULT_LO_EXPR:
3608 case VEC_WIDEN_MULT_EVEN_EXPR:
3609 case VEC_WIDEN_MULT_ODD_EXPR:
3610 case VEC_WIDEN_LSHIFT_HI_EXPR:
3611 case VEC_WIDEN_LSHIFT_LO_EXPR:
3612 case VEC_PERM_EXPR:
3613 return NULL;
3615 /* Misc codes. */
3616 case ADDR_SPACE_CONVERT_EXPR:
3617 case FIXED_CONVERT_EXPR:
3618 case OBJ_TYPE_REF:
3619 case WITH_SIZE_EXPR:
3620 return NULL;
3622 case DOT_PROD_EXPR:
3623 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3624 && SCALAR_INT_MODE_P (mode))
3627 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3628 0)))
3629 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3630 inner_mode);
3632 op1 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3633 1)))
3634 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3635 inner_mode);
3636 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3637 return simplify_gen_binary (PLUS, mode, op0, op2);
3639 return NULL;
3641 case WIDEN_MULT_EXPR:
3642 case WIDEN_MULT_PLUS_EXPR:
3643 case WIDEN_MULT_MINUS_EXPR:
3644 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3645 && SCALAR_INT_MODE_P (mode))
3647 inner_mode = GET_MODE (op0);
3648 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3649 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3650 else
3651 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3652 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3653 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3654 else
3655 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3656 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3657 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3658 return op0;
3659 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3660 return simplify_gen_binary (PLUS, mode, op0, op2);
3661 else
3662 return simplify_gen_binary (MINUS, mode, op2, op0);
3664 return NULL;
3666 case MULT_HIGHPART_EXPR:
3667 /* ??? Similar to the above. */
3668 return NULL;
3670 case WIDEN_SUM_EXPR:
3671 case WIDEN_LSHIFT_EXPR:
3672 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3673 && SCALAR_INT_MODE_P (mode))
3676 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3677 0)))
3678 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3679 inner_mode);
3680 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3681 ? ASHIFT : PLUS, mode, op0, op1);
3683 return NULL;
3685 case FMA_EXPR:
3686 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3688 default:
3689 flag_unsupported:
3690 #ifdef ENABLE_CHECKING
3691 debug_tree (exp);
3692 gcc_unreachable ();
3693 #else
3694 return NULL;
3695 #endif
3699 /* Return an RTX equivalent to the source bind value of the tree expression
3700 EXP. */
3702 static rtx
3703 expand_debug_source_expr (tree exp)
3705 rtx op0 = NULL_RTX;
3706 enum machine_mode mode = VOIDmode, inner_mode;
3708 switch (TREE_CODE (exp))
3710 case PARM_DECL:
3712 mode = DECL_MODE (exp);
3713 op0 = expand_debug_parm_decl (exp);
3714 if (op0)
3715 break;
3716 /* See whether this is an argument that has been completely
3717 optimized out. */
3718 if (!DECL_RTL_SET_P (exp)
3719 && !DECL_INCOMING_RTL (exp)
3720 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3722 tree aexp = DECL_ORIGIN (exp);
3723 if (DECL_CONTEXT (aexp)
3724 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3726 vec<tree, va_gc> **debug_args;
3727 unsigned int ix;
3728 tree ddecl;
3729 debug_args = decl_debug_args_lookup (current_function_decl);
3730 if (debug_args != NULL)
3732 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
3733 ix += 2)
3734 if (ddecl == aexp)
3735 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3739 break;
3741 default:
3742 break;
3745 if (op0 == NULL_RTX)
3746 return NULL_RTX;
3748 inner_mode = GET_MODE (op0);
3749 if (mode == inner_mode)
3750 return op0;
3752 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3754 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3755 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3756 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3757 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3758 else
3759 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3761 else if (FLOAT_MODE_P (mode))
3762 gcc_unreachable ();
3763 else if (FLOAT_MODE_P (inner_mode))
3765 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3766 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3767 else
3768 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3770 else if (CONSTANT_P (op0)
3771 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3772 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3773 subreg_lowpart_offset (mode, inner_mode));
3774 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3775 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3776 else
3777 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3779 return op0;
3782 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
3783 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
3784 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
3786 static void
3787 avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
3789 rtx exp = *exp_p;
3791 if (exp == NULL_RTX)
3792 return;
3794 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
3795 return;
3797 if (depth == 4)
3799 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
3800 rtx dval = make_debug_expr_from_rtl (exp);
3802 /* Emit a debug bind insn before INSN. */
3803 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
3804 DEBUG_EXPR_TREE_DECL (dval), exp,
3805 VAR_INIT_STATUS_INITIALIZED);
3807 emit_debug_insn_before (bind, insn);
3808 *exp_p = dval;
3809 return;
3812 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
3813 int i, j;
3814 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
3815 switch (*format_ptr++)
3817 case 'e':
3818 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
3819 break;
3821 case 'E':
3822 case 'V':
3823 for (j = 0; j < XVECLEN (exp, i); j++)
3824 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
3825 break;
3827 default:
3828 break;
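/* For example, a non-leaf subexpression reached at nesting depth four
   is replaced in place by a fresh DEBUG_EXPR, and a VAR_LOCATION debug
   insn binding that DEBUG_EXPR to the subexpression is emitted just
   before INSN, so the resulting location stays within the allowed
   nesting depth.  */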
3832 /* Expand the _LOCs in debug insns. We run this after expanding all
3833 regular insns, so that any variables referenced in the function
3834 will have their DECL_RTLs set. */
3836 static void
3837 expand_debug_locations (void)
3839 rtx insn;
3840 rtx last = get_last_insn ();
3841 int save_strict_alias = flag_strict_aliasing;
3843 /* New alias sets while setting up memory attributes cause
3844 -fcompare-debug failures, even though they don't bring about any
3845 codegen changes. */
3846 flag_strict_aliasing = 0;
3848 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3849 if (DEBUG_INSN_P (insn))
3851 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3852 rtx val, prev_insn, insn2;
3853 enum machine_mode mode;
3855 if (value == NULL_TREE)
3856 val = NULL_RTX;
3857 else
3859 if (INSN_VAR_LOCATION_STATUS (insn)
3860 == VAR_INIT_STATUS_UNINITIALIZED)
3861 val = expand_debug_source_expr (value);
3862 else
3863 val = expand_debug_expr (value);
3864 gcc_assert (last == get_last_insn ());
3867 if (!val)
3868 val = gen_rtx_UNKNOWN_VAR_LOC ();
3869 else
3871 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3873 gcc_assert (mode == GET_MODE (val)
3874 || (GET_MODE (val) == VOIDmode
3875 && (CONST_SCALAR_INT_P (val)
3876 || GET_CODE (val) == CONST_FIXED
3877 || GET_CODE (val) == LABEL_REF)));
3880 INSN_VAR_LOCATION_LOC (insn) = val;
3881 prev_insn = PREV_INSN (insn);
3882 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
3883 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
3886 flag_strict_aliasing = save_strict_alias;
3889 /* Expand basic block BB from GIMPLE trees to RTL. */
3891 static basic_block
3892 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
3894 gimple_stmt_iterator gsi;
3895 gimple_seq stmts;
3896 gimple stmt = NULL;
3897 rtx note, last;
3898 edge e;
3899 edge_iterator ei;
3900 void **elt;
3902 if (dump_file)
3903 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3904 bb->index);
3906 /* Note that since we are now transitioning from GIMPLE to RTL, we
3907 cannot use the gsi_*_bb() routines because they expect the basic
3908 block to be in GIMPLE, instead of RTL. Therefore, we need to
3909 access the BB sequence directly. */
3910 stmts = bb_seq (bb);
3911 bb->il.gimple.seq = NULL;
3912 bb->il.gimple.phi_nodes = NULL;
3913 rtl_profile_for_bb (bb);
3914 init_rtl_bb_info (bb);
3915 bb->flags |= BB_RTL;
3917 /* Remove the RETURN_EXPR if we may fall through to the exit
3918 instead. */
3919 gsi = gsi_last (stmts);
3920 if (!gsi_end_p (gsi)
3921 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3923 gimple ret_stmt = gsi_stmt (gsi);
3925 gcc_assert (single_succ_p (bb));
3926 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3928 if (bb->next_bb == EXIT_BLOCK_PTR
3929 && !gimple_return_retval (ret_stmt))
3931 gsi_remove (&gsi, false);
3932 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3936 gsi = gsi_start (stmts);
3937 if (!gsi_end_p (gsi))
3939 stmt = gsi_stmt (gsi);
3940 if (gimple_code (stmt) != GIMPLE_LABEL)
3941 stmt = NULL;
3944 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3946 if (stmt || elt)
3948 last = get_last_insn ();
3950 if (stmt)
3952 expand_gimple_stmt (stmt);
3953 gsi_next (&gsi);
3956 if (elt)
3957 emit_label ((rtx) *elt);
3959 /* Java emits line number notes at the top of labels.
3960 ??? Make this go away once line number notes are obsoleted. */
3961 BB_HEAD (bb) = NEXT_INSN (last);
3962 if (NOTE_P (BB_HEAD (bb)))
3963 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3964 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3966 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3968 else
3969 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3971 NOTE_BASIC_BLOCK (note) = bb;
3973 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3975 basic_block new_bb;
3977 stmt = gsi_stmt (gsi);
3979 /* If this statement is a non-debug one, and we generate debug
3980 insns, then this one might be the last real use of a TERed
3981 SSA_NAME, while there are still some debug uses further
3982 down. Expanding the current SSA name in such further debug
3983 uses by their RHS might lead to wrong debug info, as coalescing
3984 might make the operands of such RHS be placed into the same
3985 pseudo as something else. Like so:
3986 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3987 use(a_1);
3988 a_2 = ...
3989 #DEBUG ... => a_1
3990 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3991 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3992 the write to a_2 would actually have clobbered the place which
3993 formerly held a_0.
3995 So, instead of that, we recognize the situation, and generate
3996 debug temporaries at the last real use of TERed SSA names:
3997 a_1 = a_0 + 1;
3998 #DEBUG #D1 => a_1
3999 use(a_1);
4000 a_2 = ...
4001 #DEBUG ... => #D1
4003 if (MAY_HAVE_DEBUG_INSNS
4004 && SA.values
4005 && !is_gimple_debug (stmt))
4007 ssa_op_iter iter;
4008 tree op;
4009 gimple def;
4011 location_t sloc = curr_insn_location ();
4013 /* Look for SSA names that have their last use here (TERed
4014 names always have only one real use). */
4015 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4016 if ((def = get_gimple_for_ssa_name (op)))
4018 imm_use_iterator imm_iter;
4019 use_operand_p use_p;
4020 bool have_debug_uses = false;
4022 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
4024 if (gimple_debug_bind_p (USE_STMT (use_p)))
4026 have_debug_uses = true;
4027 break;
4031 if (have_debug_uses)
4033 /* OP is a TERed SSA name, DEF is its defining
4034 statement, and OP is used in further debug
4035 instructions. Generate a debug temporary, and
4036 replace all uses of OP in debug insns with that
4037 temporary. */
4038 gimple debugstmt;
4039 tree value = gimple_assign_rhs_to_tree (def);
4040 tree vexpr = make_node (DEBUG_EXPR_DECL);
4041 rtx val;
4042 enum machine_mode mode;
4044 set_curr_insn_location (gimple_location (def));
4046 DECL_ARTIFICIAL (vexpr) = 1;
4047 TREE_TYPE (vexpr) = TREE_TYPE (value);
4048 if (DECL_P (value))
4049 mode = DECL_MODE (value);
4050 else
4051 mode = TYPE_MODE (TREE_TYPE (value));
4052 DECL_MODE (vexpr) = mode;
4054 val = gen_rtx_VAR_LOCATION
4055 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4057 emit_debug_insn (val);
4059 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
4061 if (!gimple_debug_bind_p (debugstmt))
4062 continue;
4064 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
4065 SET_USE (use_p, vexpr);
4067 update_stmt (debugstmt);
4071 set_curr_insn_location (sloc);
4074 currently_expanding_gimple_stmt = stmt;
4076 /* Expand this statement, then evaluate the resulting RTL and
4077 fixup the CFG accordingly. */
4078 if (gimple_code (stmt) == GIMPLE_COND)
4080 new_bb = expand_gimple_cond (bb, stmt);
4081 if (new_bb)
4082 return new_bb;
4084 else if (gimple_debug_bind_p (stmt))
4086 location_t sloc = curr_insn_location ();
4087 gimple_stmt_iterator nsi = gsi;
4089 for (;;)
4091 tree var = gimple_debug_bind_get_var (stmt);
4092 tree value;
4093 rtx val;
4094 enum machine_mode mode;
4096 if (TREE_CODE (var) != DEBUG_EXPR_DECL
4097 && TREE_CODE (var) != LABEL_DECL
4098 && !target_for_debug_bind (var))
4099 goto delink_debug_stmt;
4101 if (gimple_debug_bind_has_value_p (stmt))
4102 value = gimple_debug_bind_get_value (stmt);
4103 else
4104 value = NULL_TREE;
4106 last = get_last_insn ();
4108 set_curr_insn_location (gimple_location (stmt));
4110 if (DECL_P (var))
4111 mode = DECL_MODE (var);
4112 else
4113 mode = TYPE_MODE (TREE_TYPE (var));
4115 val = gen_rtx_VAR_LOCATION
4116 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4118 emit_debug_insn (val);
4120 if (dump_file && (dump_flags & TDF_DETAILS))
4122 /* We can't dump the insn with a TREE where an RTX
4123 is expected. */
4124 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4125 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4126 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4129 delink_debug_stmt:
4130 /* In order not to generate too many debug temporaries,
4131 we delink all uses of debug statements we already expanded.
4132 Therefore debug statements between definition and real
4133 use of TERed SSA names will continue to use the SSA name,
4134 and not be replaced with debug temps. */
4135 delink_stmt_imm_use (stmt);
4137 gsi = nsi;
4138 gsi_next (&nsi);
4139 if (gsi_end_p (nsi))
4140 break;
4141 stmt = gsi_stmt (nsi);
4142 if (!gimple_debug_bind_p (stmt))
4143 break;
4146 set_curr_insn_location (sloc);
4148 else if (gimple_debug_source_bind_p (stmt))
4150 location_t sloc = curr_insn_location ();
4151 tree var = gimple_debug_source_bind_get_var (stmt);
4152 tree value = gimple_debug_source_bind_get_value (stmt);
4153 rtx val;
4154 enum machine_mode mode;
4156 last = get_last_insn ();
4158 set_curr_insn_location (gimple_location (stmt));
4160 mode = DECL_MODE (var);
4162 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
4163 VAR_INIT_STATUS_UNINITIALIZED);
4165 emit_debug_insn (val);
4167 if (dump_file && (dump_flags & TDF_DETAILS))
4169 /* We can't dump the insn with a TREE where an RTX
4170 is expected. */
4171 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4172 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4173 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4176 set_curr_insn_location (sloc);
4178 else
4180 if (is_gimple_call (stmt)
4181 && gimple_call_tail_p (stmt)
4182 && disable_tail_calls)
4183 gimple_call_set_tail (stmt, false);
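/* A tail call may end RTL generation for this block early.
   expand_gimple_tailcall reports through CAN_FALLTHRU whether the
   call can also fall through; if it can, keep expanding the remaining
   statements into the block it returns, otherwise that block is the
   final result for this BB.  */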
4185 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
4187 bool can_fallthru;
4188 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4189 if (new_bb)
4191 if (can_fallthru)
4192 bb = new_bb;
4193 else
4194 return new_bb;
4197 else
4199 def_operand_p def_p;
4200 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4202 if (def_p != NULL)
4204 /* Ignore this stmt if it is in the list of
4205 replaceable expressions. */
4206 if (SA.values
4207 && bitmap_bit_p (SA.values,
4208 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4209 continue;
4211 last = expand_gimple_stmt (stmt);
4212 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4217 currently_expanding_gimple_stmt = NULL;
4219 /* Expand implicit goto and convert goto_locus. */
4220 FOR_EACH_EDGE (e, ei, bb->succs)
4222 if (e->goto_locus != UNKNOWN_LOCATION)
4223 set_curr_insn_location (e->goto_locus);
4224 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4226 emit_jump (label_rtx_for_bb (e->dest));
4227 e->flags &= ~EDGE_FALLTHRU;
4231 /* Expanded RTL can create a jump in the last instruction of the block.
4232 Such a jump might later be assumed to be a jump to the successor and break edge insertion.
4233 We need to insert a dummy move to prevent this. PR41440. */
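/* Rough picture of why the no-op move helps: if the block ends in a
   jump but still has a fallthru edge, later edge insertions could
   mistake that jump for the edge's own control transfer.  Appending

     (set (reg:SI dummy) (reg:SI dummy))

   after the jump leaves a safe non-jump instruction at the end of the
   block for those insertions to attach to.  */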
4234 if (single_succ_p (bb)
4235 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4236 && (last = get_last_insn ())
4237 && JUMP_P (last))
4239 rtx dummy = gen_reg_rtx (SImode);
4240 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4243 do_pending_stack_adjust ();
4245 /* Find the block tail. The last insn in the block is the insn
4246 before a barrier and/or table jump insn. */
4247 last = get_last_insn ();
4248 if (BARRIER_P (last))
4249 last = PREV_INSN (last);
4250 if (JUMP_TABLE_DATA_P (last))
4251 last = PREV_INSN (PREV_INSN (last));
4252 BB_END (bb) = last;
4254 update_bb_for_insn (bb);
4256 return bb;
4260 /* Create a basic block for initialization code. */
4262 static basic_block
4263 construct_init_block (void)
4265 basic_block init_block, first_block;
4266 edge e = NULL;
4267 int flags;
4269 /* Multiple entry points not supported yet. */
4270 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4271 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4272 init_rtl_bb_info (EXIT_BLOCK_PTR);
4273 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4274 EXIT_BLOCK_PTR->flags |= BB_RTL;
4276 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4278 /* When the entry edge points to the first basic block, we don't need a jump;
4279 otherwise we have to jump to the proper target. */
4280 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4282 tree label = gimple_block_label (e->dest);
4284 emit_jump (label_rtx (label));
4285 flags = 0;
4287 else
4288 flags = EDGE_FALLTHRU;
4290 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4291 get_last_insn (),
4292 ENTRY_BLOCK_PTR);
4293 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4294 init_block->count = ENTRY_BLOCK_PTR->count;
4295 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4296 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
4297 if (e)
4299 first_block = e->dest;
4300 redirect_edge_succ (e, init_block);
4301 e = make_edge (init_block, first_block, flags);
4303 else
4304 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4305 e->probability = REG_BR_PROB_BASE;
4306 e->count = ENTRY_BLOCK_PTR->count;
4308 update_bb_for_insn (init_block);
4309 return init_block;
4312 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4313 found in the block tree. */
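/* Tiny worked example (block names invented): for a block tree

     OUTER
       BLOCK_SUBBLOCKS: INNER_A -> (chain) INNER_B
       INNER_A's BLOCK_SUBBLOCKS: INNER_A1

   set_block_levels (OUTER, 0) gives OUTER level 0, INNER_A and INNER_B
   level 1, and INNER_A1 level 2: siblings reached via BLOCK_CHAIN share
   a level, children via BLOCK_SUBBLOCKS go one deeper.  */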
4315 static void
4316 set_block_levels (tree block, int level)
4318 while (block)
4320 BLOCK_NUMBER (block) = level;
4321 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4322 block = BLOCK_CHAIN (block);
4326 /* Create a block containing landing pads and similar stuff. */
4328 static void
4329 construct_exit_block (void)
4331 rtx head = get_last_insn ();
4332 rtx end;
4333 basic_block exit_block;
4334 edge e, e2;
4335 unsigned ix;
4336 edge_iterator ei;
4337 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4339 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4341 /* Make sure the locus is set to the end of the function, so that
4342 epilogue line numbers and warnings are set properly. */
4343 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
4344 input_location = cfun->function_end_locus;
4346 /* Generate rtl for function exit. */
4347 expand_function_end ();
4349 end = get_last_insn ();
4350 if (head == end)
4351 return;
4352 /* While emitting the function end we could have moved the end of the last basic block; restore it. */
4354 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4355 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4356 head = NEXT_INSN (head);
4357 exit_block = create_basic_block (NEXT_INSN (head), end,
4358 EXIT_BLOCK_PTR->prev_bb);
4359 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4360 exit_block->count = EXIT_BLOCK_PTR->count;
4361 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4362 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
4364 ix = 0;
4365 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4367 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4368 if (!(e->flags & EDGE_ABNORMAL))
4369 redirect_edge_succ (e, exit_block);
4370 else
4371 ix++;
4374 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4375 e->probability = REG_BR_PROB_BASE;
4376 e->count = EXIT_BLOCK_PTR->count;
4377 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4378 if (e2 != e)
4380 e->count -= e2->count;
4381 exit_block->count -= e2->count;
4382 exit_block->frequency -= EDGE_FREQUENCY (e2);
4384 if (e->count < 0)
4385 e->count = 0;
4386 if (exit_block->count < 0)
4387 exit_block->count = 0;
4388 if (exit_block->frequency < 0)
4389 exit_block->frequency = 0;
4390 update_bb_for_insn (exit_block);
4393 /* Helper function for discover_nonconstant_array_refs.
4394 Look for ARRAY_REF nodes with non-constant indexes and mark them
4395 addressable. */
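/* Hypothetical example: with

     int a[2];      (small enough that it may end up in a register)
     ... = a[i];    (i not a compile-time constant)

   the ARRAY_REF has a non-constant index, so the walker below marks
   the base decl A as TREE_ADDRESSABLE, forcing it into memory where
   the variable-offset access can be expanded.  */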
4397 static tree
4398 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4399 void *data ATTRIBUTE_UNUSED)
4401 tree t = *tp;
4403 if (IS_TYPE_OR_DECL_P (t))
4404 *walk_subtrees = 0;
4405 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4407 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4408 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4409 && (!TREE_OPERAND (t, 2)
4410 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4411 || (TREE_CODE (t) == COMPONENT_REF
4412 && (!TREE_OPERAND (t,2)
4413 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4414 || TREE_CODE (t) == BIT_FIELD_REF
4415 || TREE_CODE (t) == REALPART_EXPR
4416 || TREE_CODE (t) == IMAGPART_EXPR
4417 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4418 || CONVERT_EXPR_P (t))
4419 t = TREE_OPERAND (t, 0);
4421 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4423 t = get_base_address (t);
4424 if (t && DECL_P (t)
4425 && DECL_MODE (t) != BLKmode)
4426 TREE_ADDRESSABLE (t) = 1;
4429 *walk_subtrees = 0;
4432 return NULL_TREE;
4435 /* RTL expansion is not able to compile array references with variable
4436 offsets for arrays stored in a single register. Discover such
4437 expressions and mark the variables as addressable to avoid this
4438 scenario. */
4440 static void
4441 discover_nonconstant_array_refs (void)
4443 basic_block bb;
4444 gimple_stmt_iterator gsi;
4446 FOR_EACH_BB (bb)
4447 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4449 gimple stmt = gsi_stmt (gsi);
4450 if (!is_gimple_debug (stmt))
4451 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4455 /* This function sets crtl->args.internal_arg_pointer to a virtual
4456 register if DRAP is needed. The local register allocator will replace
4457 virtual_incoming_args_rtx with the virtual register. */
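/* Sketch of the situation handled here (target details vary): when a
   local object needs stronger alignment than the incoming stack
   guarantees, the prologue realigns the stack pointer; a dynamic
   realign argument pointer (DRAP) register is then needed so incoming
   arguments, which still live relative to the unaligned entry stack
   pointer, remain addressable.  */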
4459 static void
4460 expand_stack_alignment (void)
4462 rtx drap_rtx;
4463 unsigned int preferred_stack_boundary;
4465 if (! SUPPORTS_STACK_ALIGNMENT)
4466 return;
4468 if (cfun->calls_alloca
4469 || cfun->has_nonlocal_label
4470 || crtl->has_nonlocal_goto)
4471 crtl->need_drap = true;
4473 /* Call update_stack_boundary here again to update incoming stack
4474 boundary. It may set incoming stack alignment to a different
4475 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4476 use the minimum incoming stack alignment to check if it is OK
4477 to perform sibcall optimization since sibcall optimization will
4478 only align the outgoing stack to incoming stack boundary. */
4479 if (targetm.calls.update_stack_boundary)
4480 targetm.calls.update_stack_boundary ();
4482 /* The incoming stack frame has to be aligned at least at
4483 parm_stack_boundary. */
4484 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4486 /* Update crtl->stack_alignment_estimated and use it later to align
4487 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4488 exceptions since callgraph doesn't collect incoming stack alignment
4489 in this case. */
4490 if (cfun->can_throw_non_call_exceptions
4491 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4492 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4493 else
4494 preferred_stack_boundary = crtl->preferred_stack_boundary;
4495 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4496 crtl->stack_alignment_estimated = preferred_stack_boundary;
4497 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4498 crtl->stack_alignment_needed = preferred_stack_boundary;
4500 gcc_assert (crtl->stack_alignment_needed
4501 <= crtl->stack_alignment_estimated);
4503 crtl->stack_realign_needed
4504 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4505 crtl->stack_realign_tried = crtl->stack_realign_needed;
4507 crtl->stack_realign_processed = true;
4509 /* The target has to redefine TARGET_GET_DRAP_RTX to support stack
4510 alignment. */
4511 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4512 drap_rtx = targetm.calls.get_drap_rtx ();
4514 /* stack_realign_drap and drap_rtx must match. */
4515 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4517 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4518 if (NULL != drap_rtx)
4520 crtl->args.internal_arg_pointer = drap_rtx;
4522 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4523 needed. */
4524 fixup_tail_calls ();
4528 /* Translate the intermediate representation contained in the CFG
4529 from GIMPLE trees to RTL.
4531 We do conversion per basic block and preserve/update the tree CFG.
4532 This implies we have to do some magic as the CFG can simultaneously
4533 consist of basic blocks containing RTL and GIMPLE trees. This can
4534 confuse the CFG hooks, so be careful not to manipulate the CFG during
4535 the expansion. */
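/* Rough order of the major steps below (the code is authoritative):
   rewrite out of SSA, expand the variables recorded during gimple
   lowering, expand the function start and the init block, expand each
   GIMPLE basic block into RTL, expand debug locations, finish the
   out-of-SSA machinery, build the exit block, commit pending edge
   insertions, rediscover sub-basic-blocks, and clean up the CFG and
   the SSA/EH data that is no longer needed.  */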
4537 static unsigned int
4538 gimple_expand_cfg (void)
4540 basic_block bb, init_block;
4541 sbitmap blocks;
4542 edge_iterator ei;
4543 edge e;
4544 rtx var_seq, var_ret_seq;
4545 unsigned i;
4547 timevar_push (TV_OUT_OF_SSA);
4548 rewrite_out_of_ssa (&SA);
4549 timevar_pop (TV_OUT_OF_SSA);
4550 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
4552 /* Make sure all values used by the optimization passes have sane
4553 defaults. */
4554 reg_renumber = 0;
4556 /* Some backends want to know that we are expanding to RTL. */
4557 currently_expanding_to_rtl = 1;
4558 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4559 free_dominance_info (CDI_DOMINATORS);
4561 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4563 insn_locations_init ();
4564 if (!DECL_IS_BUILTIN (current_function_decl))
4566 /* Eventually, all FEs should explicitly set function_start_locus. */
4567 if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
4568 set_curr_insn_location
4569 (DECL_SOURCE_LOCATION (current_function_decl));
4570 else
4571 set_curr_insn_location (cfun->function_start_locus);
4573 else
4574 set_curr_insn_location (UNKNOWN_LOCATION);
4575 prologue_location = curr_insn_location ();
4577 #ifdef INSN_SCHEDULING
4578 init_sched_attrs ();
4579 #endif
4581 /* Make sure first insn is a note even if we don't want linenums.
4582 This makes sure the first insn will never be deleted.
4583 Also, final expects a note to appear there. */
4584 emit_note (NOTE_INSN_DELETED);
4586 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4587 discover_nonconstant_array_refs ();
4589 targetm.expand_to_rtl_hook ();
4590 crtl->stack_alignment_needed = STACK_BOUNDARY;
4591 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4592 crtl->stack_alignment_estimated = 0;
4593 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4594 cfun->cfg->max_jumptable_ents = 0;
4596 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4597 of the function section at expansion time to predict the distance of calls. */
4598 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4600 /* Expand the variables recorded during gimple lowering. */
4601 timevar_push (TV_VAR_EXPAND);
4602 start_sequence ();
4604 var_ret_seq = expand_used_vars ();
4606 var_seq = get_insns ();
4607 end_sequence ();
4608 timevar_pop (TV_VAR_EXPAND);
4610 /* Honor stack protection warnings. */
4611 if (warn_stack_protect)
4613 if (cfun->calls_alloca)
4614 warning (OPT_Wstack_protector,
4615 "stack protector not protecting local variables: "
4616 "variable length buffer");
4617 if (has_short_buffer && !crtl->stack_protect_guard)
4618 warning (OPT_Wstack_protector,
4619 "stack protector not protecting function: "
4620 "all local arrays are less than %d bytes long",
4621 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4624 /* Set up parameters and prepare for return, for the function. */
4625 expand_function_start (current_function_decl);
4627 /* If we emitted any instructions for setting up the variables,
4628 emit them before the FUNCTION_START note. */
4629 if (var_seq)
4631 emit_insn_before (var_seq, parm_birth_insn);
4633 /* In expand_function_end we'll insert the alloca save/restore
4634 before parm_birth_insn. We've just inserted an alloca call.
4635 Adjust the pointer to match. */
4636 parm_birth_insn = var_seq;
4639 /* Now that we also have the parameter RTXs, copy them over to our
4640 partitions. */
4641 for (i = 0; i < SA.map->num_partitions; i++)
4643 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4645 if (TREE_CODE (var) != VAR_DECL
4646 && !SA.partition_to_pseudo[i])
4647 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4648 gcc_assert (SA.partition_to_pseudo[i]);
4650 /* If this decl was marked as living in multiple places, reset
4651 this now to NULL. */
4652 if (DECL_RTL_IF_SET (var) == pc_rtx)
4653 SET_DECL_RTL (var, NULL);
4655 /* Some RTL parts really want to look at DECL_RTL(x) when x
4656 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4657 SET_DECL_RTL here, making this available, but that would mean
4658 selecting one of the potentially many RTLs for one DECL. Instead
4659 of doing that we simply reset the MEM_EXPR of the RTL in question,
4660 then nobody can get at it and hence nobody can call DECL_RTL on it. */
4661 if (!DECL_RTL_SET_P (var))
4663 if (MEM_P (SA.partition_to_pseudo[i]))
4664 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4668 /* If we have a class containing differently aligned pointers
4669 we need to merge those into the corresponding RTL pointer
4670 alignment. */
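/* Hedged example (SSA names and alignments invented): if p_1, known to
   be 16-byte aligned, and q_2, only known to be 4-byte aligned, were
   coalesced into the same partition, the pseudo backing that partition
   can only be trusted with the weaker 4-byte guarantee; calling
   mark_reg_pointer with get_pointer_alignment for every name in the
   partition, as done below, keeps the recorded alignment at that
   minimum.  */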
4671 for (i = 1; i < num_ssa_names; i++)
4673 tree name = ssa_name (i);
4674 int part;
4675 rtx r;
4677 if (!name
4678 /* We might have generated new SSA names in
4679 update_alias_info_with_stack_vars. They will have NULL
4680 defining statements, and won't be part of the partitioning,
4681 so ignore those. */
4682 || !SSA_NAME_DEF_STMT (name))
4683 continue;
4684 part = var_to_partition (SA.map, name);
4685 if (part == NO_PARTITION)
4686 continue;
4688 /* Adjust all partition members to get the underlying decl of
4689 the representative which we might have created in expand_one_var. */
4690 if (SSA_NAME_VAR (name) == NULL_TREE)
4692 tree leader = partition_to_var (SA.map, part);
4693 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
4694 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
4696 if (!POINTER_TYPE_P (TREE_TYPE (name)))
4697 continue;
4699 r = SA.partition_to_pseudo[part];
4700 if (REG_P (r))
4701 mark_reg_pointer (r, get_pointer_alignment (name));
4704 /* If this function is `main', emit a call to `__main'
4705 to run global initializers, etc. */
4706 if (DECL_NAME (current_function_decl)
4707 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4708 && DECL_FILE_SCOPE_P (current_function_decl))
4709 expand_main_function ();
4711 /* Initialize the stack_protect_guard field. This must happen after the
4712 call to __main (if any) so that the external decl is initialized. */
4713 if (crtl->stack_protect_guard)
4714 stack_protect_prologue ();
4716 expand_phi_nodes (&SA);
4718 /* Register rtl specific functions for cfg. */
4719 rtl_register_cfg_hooks ();
4721 init_block = construct_init_block ();
4723 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
4724 remaining edges later. */
4725 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4726 e->flags &= ~EDGE_EXECUTABLE;
4728 lab_rtx_for_bb = pointer_map_create ();
4729 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4730 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
4732 if (MAY_HAVE_DEBUG_INSNS)
4733 expand_debug_locations ();
4735 /* Free stuff we no longer need after GIMPLE optimizations. */
4736 free_dominance_info (CDI_DOMINATORS);
4737 free_dominance_info (CDI_POST_DOMINATORS);
4738 delete_tree_cfg_annotations ();
4740 timevar_push (TV_OUT_OF_SSA);
4741 finish_out_of_ssa (&SA);
4742 timevar_pop (TV_OUT_OF_SSA);
4744 timevar_push (TV_POST_EXPAND);
4745 /* We are no longer in SSA form. */
4746 cfun->gimple_df->in_ssa_p = false;
4747 if (current_loops)
4748 loops_state_clear (LOOP_CLOSED_SSA);
4750 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4751 conservatively to true until they are all profile aware. */
4752 pointer_map_destroy (lab_rtx_for_bb);
4753 free_histograms ();
4755 construct_exit_block ();
4756 insn_locations_finalize ();
4758 if (var_ret_seq)
4760 rtx after = return_label;
4761 rtx next = NEXT_INSN (after);
4762 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
4763 after = next;
4764 emit_insn_after (var_ret_seq, after);
4767 /* Zap the tree EH table. */
4768 set_eh_throw_stmt_table (cfun, NULL);
4770 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4771 to split edges, which edge insertions might do. */
4772 rebuild_jump_labels (get_insns ());
4774 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4776 edge e;
4777 edge_iterator ei;
4778 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4780 if (e->insns.r)
4782 rebuild_jump_labels_chain (e->insns.r);
4783 /* Avoid putting insns before parm_birth_insn. */
4784 if (e->src == ENTRY_BLOCK_PTR
4785 && single_succ_p (ENTRY_BLOCK_PTR)
4786 && parm_birth_insn)
4788 rtx insns = e->insns.r;
4789 e->insns.r = NULL_RTX;
4790 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4792 else
4793 commit_one_edge_insertion (e);
4795 else
4796 ei_next (&ei);
4800 /* We're done expanding trees to RTL. */
4801 currently_expanding_to_rtl = 0;
4803 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4805 edge e;
4806 edge_iterator ei;
4807 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4809 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4810 e->flags &= ~EDGE_EXECUTABLE;
4812 /* At the moment not all abnormal edges match the RTL
4813 representation. It is safe to remove them here as
4814 find_many_sub_basic_blocks will rediscover them.
4815 In the future we should get this fixed properly. */
4816 if ((e->flags & EDGE_ABNORMAL)
4817 && !(e->flags & EDGE_SIBCALL))
4818 remove_edge (e);
4819 else
4820 ei_next (&ei);
4824 blocks = sbitmap_alloc (last_basic_block);
4825 bitmap_ones (blocks);
4826 find_many_sub_basic_blocks (blocks);
4827 sbitmap_free (blocks);
4828 purge_all_dead_edges ();
4830 expand_stack_alignment ();
4832 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4833 function. */
4834 if (crtl->tail_call_emit)
4835 fixup_tail_calls ();
4837 /* After initial rtl generation, call back to finish generating
4838 exception support code. We need to do this before cleaning up
4839 the CFG as the code does not expect dead landing pads. */
4840 if (cfun->eh->region_tree != NULL)
4841 finish_eh_generation ();
4843 /* Remove unreachable blocks, otherwise we cannot compute dominators
4844 which are needed for loop state verification. As a side-effect
4845 this also compacts blocks.
4846 ??? We cannot remove trivially dead insns here as for example
4847 the DRAP reg on i?86 is not magically live at this point.
4848 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4849 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4851 #ifdef ENABLE_CHECKING
4852 verify_flow_info ();
4853 #endif
4855 /* Initialize pseudos allocated for hard registers. */
4856 emit_initial_value_sets ();
4858 /* And finally unshare all RTL. */
4859 unshare_all_rtl ();
4861 /* There's no need to defer outputting this function any more; we
4862 know we want to output it. */
4863 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4865 /* Now that we're done expanding trees to RTL, we shouldn't have any
4866 more CONCATs anywhere. */
4867 generating_concat_p = 0;
4869 if (dump_file)
4871 fprintf (dump_file,
4872 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4873 /* And the pass manager will dump RTL for us. */
4876 /* If we're emitting a nested function, make sure its parent gets
4877 emitted as well. Doing otherwise confuses debug info. */
4879 tree parent;
4880 for (parent = DECL_CONTEXT (current_function_decl);
4881 parent != NULL_TREE;
4882 parent = get_containing_scope (parent))
4883 if (TREE_CODE (parent) == FUNCTION_DECL)
4884 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4887 /* We are now committed to emitting code for this function. Do any
4888 preparation, such as emitting abstract debug info for the inline function
4889 before it gets mangled by optimization. */
4890 if (cgraph_function_possibly_inlined_p (current_function_decl))
4891 (*debug_hooks->outlining_inline_function) (current_function_decl);
4893 TREE_ASM_WRITTEN (current_function_decl) = 1;
4895 /* After expanding, the return labels are no longer needed. */
4896 return_label = NULL;
4897 naked_return_label = NULL;
4899 /* After expanding, the tm_restart map is no longer needed. */
4900 if (cfun->gimple_df->tm_restart)
4902 htab_delete (cfun->gimple_df->tm_restart);
4903 cfun->gimple_df->tm_restart = NULL;
4906 /* Tag the blocks with a depth number so that change_scope can find
4907 the common parent easily. */
4908 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4909 default_rtl_profile ();
4911 timevar_pop (TV_POST_EXPAND);
4913 return 0;
4916 namespace {
4918 const pass_data pass_data_expand =
4920 RTL_PASS, /* type */
4921 "expand", /* name */
4922 OPTGROUP_NONE, /* optinfo_flags */
4923 false, /* has_gate */
4924 true, /* has_execute */
4925 TV_EXPAND, /* tv_id */
4926 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
4927 | PROP_gimple_lcx
4928 | PROP_gimple_lvec ), /* properties_required */
4929 PROP_rtl, /* properties_provided */
4930 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
4931 ( TODO_verify_ssa | TODO_verify_flow
4932 | TODO_verify_stmts ), /* todo_flags_start */
4933 0, /* todo_flags_finish */
4936 class pass_expand : public rtl_opt_pass
4938 public:
4939 pass_expand (gcc::context *ctxt)
4940 : rtl_opt_pass (pass_data_expand, ctxt)
4943 /* opt_pass methods: */
4944 unsigned int execute () { return gimple_expand_cfg (); }
4946 }; // class pass_expand
4948 } // anon namespace
4950 rtl_opt_pass *
4951 make_pass_expand (gcc::context *ctxt)
4953 return new pass_expand (ctxt);