1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "basic-block.h"
28 #include "function.h"
29 #include "expr.h"
30 #include "langhooks.h"
31 #include "bitmap.h"
32 #include "gimple.h"
33 #include "gimple-ssa.h"
34 #include "cgraph.h"
35 #include "tree-cfg.h"
36 #include "tree-phinodes.h"
37 #include "ssa-iterators.h"
38 #include "tree-ssanames.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "tree-pass.h"
42 #include "except.h"
43 #include "flags.h"
44 #include "diagnostic.h"
45 #include "gimple-pretty-print.h"
46 #include "toplev.h"
47 #include "debug.h"
48 #include "params.h"
49 #include "tree-inline.h"
50 #include "value-prof.h"
51 #include "target.h"
52 #include "tree-ssa-live.h"
53 #include "tree-outof-ssa.h"
54 #include "sbitmap.h"
55 #include "cfgloop.h"
56 #include "regs.h" /* For reg_renumber. */
57 #include "insn-attr.h" /* For INSN_SCHEDULING. */
58 #include "asan.h"
59 #include "tree-ssa-address.h"
61 /* This variable holds information helping the rewriting of SSA trees
62 into RTL. */
63 struct ssaexpand SA;
65 /* This variable holds the currently expanded gimple statement for purposes
66 of communicating the profile info to the builtin expanders. */
67 gimple currently_expanding_gimple_stmt;
69 static rtx expand_debug_expr (tree);
71 /* Return an expression tree corresponding to the RHS of GIMPLE
72 statement STMT. */
74 tree
75 gimple_assign_rhs_to_tree (gimple stmt)
77 tree t;
78 enum gimple_rhs_class grhs_class;
80 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
82 if (grhs_class == GIMPLE_TERNARY_RHS)
83 t = build3 (gimple_assign_rhs_code (stmt),
84 TREE_TYPE (gimple_assign_lhs (stmt)),
85 gimple_assign_rhs1 (stmt),
86 gimple_assign_rhs2 (stmt),
87 gimple_assign_rhs3 (stmt));
88 else if (grhs_class == GIMPLE_BINARY_RHS)
89 t = build2 (gimple_assign_rhs_code (stmt),
90 TREE_TYPE (gimple_assign_lhs (stmt)),
91 gimple_assign_rhs1 (stmt),
92 gimple_assign_rhs2 (stmt));
93 else if (grhs_class == GIMPLE_UNARY_RHS)
94 t = build1 (gimple_assign_rhs_code (stmt),
95 TREE_TYPE (gimple_assign_lhs (stmt)),
96 gimple_assign_rhs1 (stmt));
97 else if (grhs_class == GIMPLE_SINGLE_RHS)
99 t = gimple_assign_rhs1 (stmt);
100 /* Avoid modifying this tree in place below. */
101 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
102 && gimple_location (stmt) != EXPR_LOCATION (t))
103 || (gimple_block (stmt)
104 && currently_expanding_to_rtl
105 && EXPR_P (t)))
106 t = copy_node (t);
108 else
109 gcc_unreachable ();
111 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
112 SET_EXPR_LOCATION (t, gimple_location (stmt));
114 return t;
118 #ifndef STACK_ALIGNMENT_NEEDED
119 #define STACK_ALIGNMENT_NEEDED 1
120 #endif
122 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
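/* For example (illustrative only): given an SSA_NAME i_3 whose
   SSA_NAME_VAR is the VAR_DECL i, SSAVAR (i_3) yields i, while
   SSAVAR (i) simply yields i unchanged.  */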
124 /* Associate declaration T with storage space X. If T is not an
125 SSA name this is exactly SET_DECL_RTL, otherwise make the
126 partition of T associated with X. */
127 static inline void
128 set_rtl (tree t, rtx x)
130 if (TREE_CODE (t) == SSA_NAME)
132 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
133 if (x && !MEM_P (x))
134 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
135 /* For the benefit of debug information at -O0 (where vartracking
136 doesn't run) record the place also in the base DECL if it's
137 a normal variable (not a parameter). */
138 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
140 tree var = SSA_NAME_VAR (t);
141 /* If we don't yet have something recorded, just record it now. */
142 if (!DECL_RTL_SET_P (var))
143 SET_DECL_RTL (var, x);
144 /* If we have it set already to "multiple places" don't
145 change this. */
146 else if (DECL_RTL (var) == pc_rtx)
148 /* If we have something recorded and it's not the same place
149 as we want to record now, we have multiple partitions for the
150 same base variable, with different places. We can't just
151 randomly choose one, hence we have to say that we don't know.
152 This only happens with optimization, and there var-tracking
153 will figure out the right thing. */
154 else if (DECL_RTL (var) != x)
155 SET_DECL_RTL (var, pc_rtx);
158 else
159 SET_DECL_RTL (t, x);
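/* A sketch of the resulting DECL_RTL convention, as used by the code
   above and by the stack-var machinery below: unset means no location
   has been assigned yet, pc_rtx is a sentinel meaning either "queued
   for stack-var partitioning" (see add_stack_var) or "this base
   variable lives in multiple places", and any other rtx is the real
   location.  */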
162 /* This structure holds data relevant to one variable that will be
163 placed in a stack slot. */
164 struct stack_var
166 /* The Variable. */
167 tree decl;
169 /* Initially, the size of the variable. Later, the size of the partition,
170 if this variable becomes its partition's representative. */
171 HOST_WIDE_INT size;
173 /* The *byte* alignment required for this variable. Or, as with the
174 size, the alignment for this partition. */
175 unsigned int alignb;
177 /* The partition representative. */
178 size_t representative;
180 /* The next stack variable in the partition, or EOC. */
181 size_t next;
183 /* The numbers of conflicting stack variables. */
184 bitmap conflicts;
187 #define EOC ((size_t)-1)
189 /* We have an array of such objects while deciding allocation. */
190 static struct stack_var *stack_vars;
191 static size_t stack_vars_alloc;
192 static size_t stack_vars_num;
193 static struct pointer_map_t *decl_to_stack_part;
195 /* Conflict bitmaps go on this obstack. This allows us to destroy
196 all of them in one big sweep. */
197 static bitmap_obstack stack_var_bitmap_obstack;
199 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
200 is non-decreasing. */
201 static size_t *stack_vars_sorted;
203 /* The phase of the stack frame. This is the known misalignment of
204 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
205 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
206 static int frame_phase;
208 /* Used during expand_used_vars to remember if we saw any decls for
209 which we'd like to enable stack smashing protection. */
210 static bool has_protected_decls;
212 /* Used during expand_used_vars. Remember if we saw a character buffer
213 smaller than our cutoff threshold. Used for -Wstack-protector. */
214 static bool has_short_buffer;
216 /* Compute the byte alignment to use for DECL. Ignore alignment
217 we can't do with expected alignment of the stack boundary. */
219 static unsigned int
220 align_local_variable (tree decl)
222 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
223 DECL_ALIGN (decl) = align;
224 return align / BITS_PER_UNIT;
227 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
228 Return the frame offset. */
230 static HOST_WIDE_INT
231 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
233 HOST_WIDE_INT offset, new_frame_offset;
235 new_frame_offset = frame_offset;
236 if (FRAME_GROWS_DOWNWARD)
238 new_frame_offset -= size + frame_phase;
239 new_frame_offset &= -align;
240 new_frame_offset += frame_phase;
241 offset = new_frame_offset;
243 else
245 new_frame_offset -= frame_phase;
246 new_frame_offset += align - 1;
247 new_frame_offset &= -align;
248 new_frame_offset += frame_phase;
249 offset = new_frame_offset;
250 new_frame_offset += size;
252 frame_offset = new_frame_offset;
254 if (frame_offset_overflow (frame_offset, cfun->decl))
255 frame_offset = offset = 0;
257 return offset;
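/* A worked example (illustrative, assuming FRAME_GROWS_DOWNWARD and
   frame_phase == 0): with frame_offset == -16, a request for SIZE == 12
   at ALIGN == 8 computes -16 - 12 == -28, rounds down to -32 via the
   "&= -align" step, stores -32 as the new frame_offset and returns it
   as the offset of the allocation.  */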
260 /* Accumulate DECL into STACK_VARS. */
262 static void
263 add_stack_var (tree decl)
265 struct stack_var *v;
267 if (stack_vars_num >= stack_vars_alloc)
269 if (stack_vars_alloc)
270 stack_vars_alloc = stack_vars_alloc * 3 / 2;
271 else
272 stack_vars_alloc = 32;
273 stack_vars
274 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
276 if (!decl_to_stack_part)
277 decl_to_stack_part = pointer_map_create ();
279 v = &stack_vars[stack_vars_num];
280 * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;
282 v->decl = decl;
283 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
284 /* Ensure that all variables have size, so that &a != &b for any two
285 variables that are simultaneously live. */
286 if (v->size == 0)
287 v->size = 1;
288 v->alignb = align_local_variable (SSAVAR (decl));
289 /* An alignment of zero can mightily confuse us later. */
290 gcc_assert (v->alignb != 0);
292 /* All variables are initially in their own partition. */
293 v->representative = stack_vars_num;
294 v->next = EOC;
296 /* All variables initially conflict with no other. */
297 v->conflicts = NULL;
299 /* Ensure that this decl doesn't get put onto the list twice. */
300 set_rtl (decl, pc_rtx);
302 stack_vars_num++;
305 /* Make the decls associated with luid's X and Y conflict. */
307 static void
308 add_stack_var_conflict (size_t x, size_t y)
310 struct stack_var *a = &stack_vars[x];
311 struct stack_var *b = &stack_vars[y];
312 if (!a->conflicts)
313 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
314 if (!b->conflicts)
315 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
316 bitmap_set_bit (a->conflicts, y);
317 bitmap_set_bit (b->conflicts, x);
320 /* Check whether the decls associated with luid's X and Y conflict. */
322 static bool
323 stack_var_conflict_p (size_t x, size_t y)
325 struct stack_var *a = &stack_vars[x];
326 struct stack_var *b = &stack_vars[y];
327 if (x == y)
328 return false;
329 /* Partitions containing an SSA name result from gimple registers
330 with things like unsupported modes. They are top-level and
331 hence conflict with everything else. */
332 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
333 return true;
335 if (!a->conflicts || !b->conflicts)
336 return false;
337 return bitmap_bit_p (a->conflicts, y);
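/* Note: checking only A's bitmap is sufficient here because
   add_stack_var_conflict always records a conflict symmetrically,
   in both A's and B's bitmaps.  */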
340 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
341 enter its partition number into bitmap DATA. */
343 static bool
344 visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
346 bitmap active = (bitmap)data;
347 op = get_base_address (op);
348 if (op
349 && DECL_P (op)
350 && DECL_RTL_IF_SET (op) == pc_rtx)
352 size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
353 if (v)
354 bitmap_set_bit (active, *v);
356 return false;
359 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
360 record conflicts between it and all currently active other partitions
361 from bitmap DATA. */
363 static bool
364 visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
366 bitmap active = (bitmap)data;
367 op = get_base_address (op);
368 if (op
369 && DECL_P (op)
370 && DECL_RTL_IF_SET (op) == pc_rtx)
372 size_t *v =
373 (size_t *) pointer_map_contains (decl_to_stack_part, op);
374 if (v && bitmap_set_bit (active, *v))
376 size_t num = *v;
377 bitmap_iterator bi;
378 unsigned i;
379 gcc_assert (num < stack_vars_num);
380 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
381 add_stack_var_conflict (num, i);
384 return false;
387 /* Helper routine for add_scope_conflicts, calculating the active partitions
388 at the end of BB, leaving the result in WORK. We're called to generate
389 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
390 liveness. */
392 static void
393 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
395 edge e;
396 edge_iterator ei;
397 gimple_stmt_iterator gsi;
398 bool (*visit)(gimple, tree, void *);
400 bitmap_clear (work);
401 FOR_EACH_EDGE (e, ei, bb->preds)
402 bitmap_ior_into (work, (bitmap)e->src->aux);
404 visit = visit_op;
406 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
408 gimple stmt = gsi_stmt (gsi);
409 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
411 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
413 gimple stmt = gsi_stmt (gsi);
415 if (gimple_clobber_p (stmt))
417 tree lhs = gimple_assign_lhs (stmt);
418 size_t *v;
419 /* Nested function lowering might introduce LHSs
420 that are COMPONENT_REFs. */
421 if (TREE_CODE (lhs) != VAR_DECL)
422 continue;
423 if (DECL_RTL_IF_SET (lhs) == pc_rtx
424 && (v = (size_t *)
425 pointer_map_contains (decl_to_stack_part, lhs)))
426 bitmap_clear_bit (work, *v);
428 else if (!is_gimple_debug (stmt))
430 if (for_conflict
431 && visit == visit_op)
433 /* If this is the first real instruction in this BB we need
434 to add conflicts for everything live at this point now.
435 Unlike classical liveness for named objects we can't
436 rely on seeing a def/use of the names we're interested in.
437 There might merely be indirect loads/stores. We'd not add any
438 conflicts for such partitions. */
439 bitmap_iterator bi;
440 unsigned i;
441 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
443 struct stack_var *a = &stack_vars[i];
444 if (!a->conflicts)
445 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
446 bitmap_ior_into (a->conflicts, work);
448 visit = visit_conflict;
450 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
455 /* Generate stack partition conflicts between all partitions that are
456 simultaneously live. */
458 static void
459 add_scope_conflicts (void)
461 basic_block bb;
462 bool changed;
463 bitmap work = BITMAP_ALLOC (NULL);
464 int *rpo;
465 int n_bbs;
467 /* We approximate the live range of a stack variable by taking the first
468 mention of its name as starting point(s), and by the end-of-scope
469 death clobber added by gimplify as ending point(s) of the range.
470 This overapproximates in the case where we, for instance, moved an
471 address-taken operation upward without also moving a dereference to it upwards.
472 But it is conservatively correct, as a variable can never hold values
473 before its name is mentioned at least once.
475 We then do a mostly classical bitmap liveness algorithm. */
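/* A sketch of what the fixed-point loop below computes for each block BB,
   writing live (BB) for the bitmap stored in BB->aux:
     live (BB) = union of live (P) over predecessors P
                 + partitions mentioned by BB's statements
                 - partitions whose death clobber appears in BB,
   iterated over the blocks in reverse post-order until no bitmap changes.
   A final pass with FOR_CONFLICT set then records the actual conflicts.  */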
477 FOR_ALL_BB (bb)
478 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
480 rpo = XNEWVEC (int, last_basic_block);
481 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
483 changed = true;
484 while (changed)
486 int i;
487 changed = false;
488 for (i = 0; i < n_bbs; i++)
490 bitmap active;
491 bb = BASIC_BLOCK (rpo[i]);
492 active = (bitmap)bb->aux;
493 add_scope_conflicts_1 (bb, work, false);
494 if (bitmap_ior_into (active, work))
495 changed = true;
499 FOR_EACH_BB (bb)
500 add_scope_conflicts_1 (bb, work, true);
502 free (rpo);
503 BITMAP_FREE (work);
504 FOR_ALL_BB (bb)
505 BITMAP_FREE (bb->aux);
508 /* A subroutine of partition_stack_vars. A comparison function for qsort,
509 sorting an array of indices by the properties of the object. */
511 static int
512 stack_var_cmp (const void *a, const void *b)
514 size_t ia = *(const size_t *)a;
515 size_t ib = *(const size_t *)b;
516 unsigned int aligna = stack_vars[ia].alignb;
517 unsigned int alignb = stack_vars[ib].alignb;
518 HOST_WIDE_INT sizea = stack_vars[ia].size;
519 HOST_WIDE_INT sizeb = stack_vars[ib].size;
520 tree decla = stack_vars[ia].decl;
521 tree declb = stack_vars[ib].decl;
522 bool largea, largeb;
523 unsigned int uida, uidb;
525 /* Primary compare on "large" alignment. Large comes first. */
526 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
527 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
528 if (largea != largeb)
529 return (int)largeb - (int)largea;
531 /* Secondary compare on size, decreasing */
532 if (sizea > sizeb)
533 return -1;
534 if (sizea < sizeb)
535 return 1;
537 /* Tertiary compare on true alignment, decreasing. */
538 if (aligna < alignb)
539 return -1;
540 if (aligna > alignb)
541 return 1;
543 /* Final compare on ID for sort stability, increasing.
544 Two SSA names are compared by their version, SSA names come before
545 non-SSA names, and two normal decls are compared by their DECL_UID. */
546 if (TREE_CODE (decla) == SSA_NAME)
548 if (TREE_CODE (declb) == SSA_NAME)
549 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
550 else
551 return -1;
553 else if (TREE_CODE (declb) == SSA_NAME)
554 return 1;
555 else
556 uida = DECL_UID (decla), uidb = DECL_UID (declb);
557 if (uida < uidb)
558 return 1;
559 if (uida > uidb)
560 return -1;
561 return 0;
565 /* If the points-to solution *PT points to variables that are in a partition
566 together with other variables add all partition members to the pointed-to
567 variables bitmap. */
569 static void
570 add_partitioned_vars_to_ptset (struct pt_solution *pt,
571 struct pointer_map_t *decls_to_partitions,
572 struct pointer_set_t *visited, bitmap temp)
574 bitmap_iterator bi;
575 unsigned i;
576 bitmap *part;
578 if (pt->anything
579 || pt->vars == NULL
580 /* The pointed-to vars bitmap is shared; it is enough to
581 visit it once. */
582 || pointer_set_insert (visited, pt->vars))
583 return;
585 bitmap_clear (temp);
587 /* By using a temporary bitmap to store all members of the partitions
588 we have to add we make sure to visit each of the partitions only
589 once. */
590 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
591 if ((!temp
592 || !bitmap_bit_p (temp, i))
593 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
594 (void *)(size_t) i)))
595 bitmap_ior_into (temp, *part);
596 if (!bitmap_empty_p (temp))
597 bitmap_ior_into (pt->vars, temp);
600 /* Update points-to sets based on partition info, so we can use them on RTL.
601 The bitmaps representing stack partitions will be saved until expand,
602 where partitioned decls used as bases in memory expressions will be
603 rewritten. */
605 static void
606 update_alias_info_with_stack_vars (void)
608 struct pointer_map_t *decls_to_partitions = NULL;
609 size_t i, j;
610 tree var = NULL_TREE;
612 for (i = 0; i < stack_vars_num; i++)
614 bitmap part = NULL;
615 tree name;
616 struct ptr_info_def *pi;
618 /* Not interested in partitions with a single variable. */
619 if (stack_vars[i].representative != i
620 || stack_vars[i].next == EOC)
621 continue;
623 if (!decls_to_partitions)
625 decls_to_partitions = pointer_map_create ();
626 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
629 /* Create an SSA_NAME that points to the partition for use
630 as base during alias-oracle queries on RTL for bases that
631 have been partitioned. */
632 if (var == NULL_TREE)
633 var = create_tmp_var (ptr_type_node, NULL);
634 name = make_ssa_name (var, NULL);
636 /* Create bitmaps representing partitions. They will be used for
637 points-to sets later, so use GGC alloc. */
638 part = BITMAP_GGC_ALLOC ();
639 for (j = i; j != EOC; j = stack_vars[j].next)
641 tree decl = stack_vars[j].decl;
642 unsigned int uid = DECL_PT_UID (decl);
643 bitmap_set_bit (part, uid);
644 *((bitmap *) pointer_map_insert (decls_to_partitions,
645 (void *)(size_t) uid)) = part;
646 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
647 decl)) = name;
648 if (TREE_ADDRESSABLE (decl))
649 TREE_ADDRESSABLE (name) = 1;
652 /* Make the SSA name point to all partition members. */
653 pi = get_ptr_info (name);
654 pt_solution_set (&pi->pt, part, false);
657 /* Make all points-to sets that contain one member of a partition
658 contain all members of the partition. */
659 if (decls_to_partitions)
661 unsigned i;
662 struct pointer_set_t *visited = pointer_set_create ();
663 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
665 for (i = 1; i < num_ssa_names; i++)
667 tree name = ssa_name (i);
668 struct ptr_info_def *pi;
670 if (name
671 && POINTER_TYPE_P (TREE_TYPE (name))
672 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
673 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
674 visited, temp);
677 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
678 decls_to_partitions, visited, temp);
680 pointer_set_destroy (visited);
681 pointer_map_destroy (decls_to_partitions);
682 BITMAP_FREE (temp);
686 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
687 partitioning algorithm. Partitions A and B are known to be non-conflicting.
688 Merge them into a single partition A. */
690 static void
691 union_stack_vars (size_t a, size_t b)
693 struct stack_var *vb = &stack_vars[b];
694 bitmap_iterator bi;
695 unsigned u;
697 gcc_assert (stack_vars[b].next == EOC);
698 /* Add B to A's partition. */
699 stack_vars[b].next = stack_vars[a].next;
700 stack_vars[b].representative = a;
701 stack_vars[a].next = b;
703 /* Update the required alignment of partition A to account for B. */
704 if (stack_vars[a].alignb < stack_vars[b].alignb)
705 stack_vars[a].alignb = stack_vars[b].alignb;
707 /* Update the interference graph and merge the conflicts. */
708 if (vb->conflicts)
710 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
711 add_stack_var_conflict (a, stack_vars[u].representative);
712 BITMAP_FREE (vb->conflicts);
716 /* A subroutine of expand_used_vars. Binpack the variables into
717 partitions constrained by the interference graph. The overall
718 algorithm used is as follows:
720 Sort the objects by size in descending order.
721 For each object A {
722 S = size(A)
723 O = 0
724 loop {
725 Look for the largest non-conflicting object B with size <= S.
726 UNION (A, B)
727 }
728 }
729 */
731 static void
732 partition_stack_vars (void)
734 size_t si, sj, n = stack_vars_num;
736 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
737 for (si = 0; si < n; ++si)
738 stack_vars_sorted[si] = si;
740 if (n == 1)
741 return;
743 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
745 for (si = 0; si < n; ++si)
747 size_t i = stack_vars_sorted[si];
748 unsigned int ialign = stack_vars[i].alignb;
749 HOST_WIDE_INT isize = stack_vars[i].size;
751 /* Ignore objects that aren't partition representatives. If we
752 see a var that is not a partition representative, it must
753 have been merged earlier. */
754 if (stack_vars[i].representative != i)
755 continue;
757 for (sj = si + 1; sj < n; ++sj)
759 size_t j = stack_vars_sorted[sj];
760 unsigned int jalign = stack_vars[j].alignb;
761 HOST_WIDE_INT jsize = stack_vars[j].size;
763 /* Ignore objects that aren't partition representatives. */
764 if (stack_vars[j].representative != j)
765 continue;
767 /* Do not mix objects of "small" (supported) alignment
768 and "large" (unsupported) alignment. */
769 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
770 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
771 break;
773 /* For Address Sanitizer do not mix objects with different
774 sizes, as the shorter vars wouldn't be adequately protected.
775 Don't do that for "large" (unsupported) alignment objects,
776 those aren't protected anyway. */
777 if ((flag_sanitize & SANITIZE_ADDRESS) && isize != jsize
778 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
779 break;
781 /* Ignore conflicting objects. */
782 if (stack_var_conflict_p (i, j))
783 continue;
785 /* UNION the objects, placing J at OFFSET. */
786 union_stack_vars (i, j);
790 update_alias_info_with_stack_vars ();
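/* Note that the nested loops above are quadratic in the number of
   candidate stack variables; defer_stack_allocation below keeps that
   number small at -O0 for exactly this reason.  */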
793 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
795 static void
796 dump_stack_var_partition (void)
798 size_t si, i, j, n = stack_vars_num;
800 for (si = 0; si < n; ++si)
802 i = stack_vars_sorted[si];
804 /* Skip variables that aren't partition representatives, for now. */
805 if (stack_vars[i].representative != i)
806 continue;
808 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
809 " align %u\n", (unsigned long) i, stack_vars[i].size,
810 stack_vars[i].alignb);
812 for (j = i; j != EOC; j = stack_vars[j].next)
814 fputc ('\t', dump_file);
815 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
817 fputc ('\n', dump_file);
821 /* Assign rtl to DECL at BASE + OFFSET. */
823 static void
824 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
825 HOST_WIDE_INT offset)
827 unsigned align;
828 rtx x;
830 /* If this fails, we've overflowed the stack frame. Error nicely? */
831 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
833 x = plus_constant (Pmode, base, offset);
834 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
836 if (TREE_CODE (decl) != SSA_NAME)
838 /* Set the alignment we actually gave this decl if it isn't an SSA name.
839 If it is, we generate stack slots only accidentally, so it isn't as
840 important; we'll simply use the alignment that is already set. */
841 if (base == virtual_stack_vars_rtx)
842 offset -= frame_phase;
843 align = offset & -offset;
844 align *= BITS_PER_UNIT;
845 if (align == 0 || align > base_align)
846 align = base_align;
848 /* One would think that we could assert that we're not decreasing
849 alignment here, but (at least) the i386 port does exactly this
850 via the MINIMUM_ALIGNMENT hook. */
852 DECL_ALIGN (decl) = align;
853 DECL_USER_ALIGN (decl) = 0;
856 set_mem_attributes (x, SSAVAR (decl), true);
857 set_rtl (decl, x);
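/* The "offset & -offset" step above extracts the largest power of two
   dividing the offset.  For example (illustrative only, with the usual
   BITS_PER_UNIT of 8): an offset of 24 bytes yields 24 & -24 == 8,
   i.e. 64-bit alignment, which is then capped by BASE_ALIGN.  */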
860 struct stack_vars_data
862 /* Vector of offset pairs, always end of some padding followed
863 by start of the padding that needs Address Sanitizer protection.
864 The vector is in reverse order; the highest offset pairs come first. */
865 vec<HOST_WIDE_INT> asan_vec;
867 /* Vector of partition representative decls in between the paddings. */
868 vec<tree> asan_decl_vec;
871 /* A subroutine of expand_used_vars. Give each partition representative
872 a unique location within the stack frame. Update each partition member
873 with that location. */
875 static void
876 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
878 size_t si, i, j, n = stack_vars_num;
879 HOST_WIDE_INT large_size = 0, large_alloc = 0;
880 rtx large_base = NULL;
881 unsigned large_align = 0;
882 tree decl;
884 /* Determine if there are any variables requiring "large" alignment.
885 Since these are dynamically allocated, we only process them if
886 no predicate is involved. */
887 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
888 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
890 /* Find the total size of these variables. */
891 for (si = 0; si < n; ++si)
893 unsigned alignb;
895 i = stack_vars_sorted[si];
896 alignb = stack_vars[i].alignb;
898 /* Stop when we get to the first decl with "small" alignment. */
899 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
900 break;
902 /* Skip variables that aren't partition representatives. */
903 if (stack_vars[i].representative != i)
904 continue;
906 /* Skip variables that have already had rtl assigned. See also
907 add_stack_var where we perpetrate this pc_rtx hack. */
908 decl = stack_vars[i].decl;
909 if ((TREE_CODE (decl) == SSA_NAME
910 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
911 : DECL_RTL (decl)) != pc_rtx)
912 continue;
914 large_size += alignb - 1;
915 large_size &= -(HOST_WIDE_INT)alignb;
916 large_size += stack_vars[i].size;
919 /* If there were any, allocate space. */
920 if (large_size > 0)
921 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
922 large_align, true);
925 for (si = 0; si < n; ++si)
927 rtx base;
928 unsigned base_align, alignb;
929 HOST_WIDE_INT offset;
931 i = stack_vars_sorted[si];
933 /* Skip variables that aren't partition representatives, for now. */
934 if (stack_vars[i].representative != i)
935 continue;
937 /* Skip variables that have already had rtl assigned. See also
938 add_stack_var where we perpetrate this pc_rtx hack. */
939 decl = stack_vars[i].decl;
940 if ((TREE_CODE (decl) == SSA_NAME
941 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
942 : DECL_RTL (decl)) != pc_rtx)
943 continue;
945 /* Check the predicate to see whether this variable should be
946 allocated in this pass. */
947 if (pred && !pred (i))
948 continue;
950 alignb = stack_vars[i].alignb;
951 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
953 if ((flag_sanitize & SANITIZE_ADDRESS) && pred)
955 HOST_WIDE_INT prev_offset = frame_offset;
956 tree repr_decl = NULL_TREE;
958 offset
959 = alloc_stack_frame_space (stack_vars[i].size
960 + ASAN_RED_ZONE_SIZE,
961 MAX (alignb, ASAN_RED_ZONE_SIZE));
962 data->asan_vec.safe_push (prev_offset);
963 data->asan_vec.safe_push (offset + stack_vars[i].size);
964 /* Find the best representative of the partition.
965 Prefer those with a DECL_NAME, and even better those
966 satisfying the asan_protect_stack_decl predicate. */
967 for (j = i; j != EOC; j = stack_vars[j].next)
968 if (asan_protect_stack_decl (stack_vars[j].decl)
969 && DECL_NAME (stack_vars[j].decl))
971 repr_decl = stack_vars[j].decl;
972 break;
974 else if (repr_decl == NULL_TREE
975 && DECL_P (stack_vars[j].decl)
976 && DECL_NAME (stack_vars[j].decl))
977 repr_decl = stack_vars[j].decl;
978 if (repr_decl == NULL_TREE)
979 repr_decl = stack_vars[i].decl;
980 data->asan_decl_vec.safe_push (repr_decl);
982 else
983 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
984 base = virtual_stack_vars_rtx;
985 base_align = crtl->max_used_stack_slot_alignment;
987 else
989 /* Large alignment is only processed in the last pass. */
990 if (pred)
991 continue;
992 gcc_assert (large_base != NULL);
994 large_alloc += alignb - 1;
995 large_alloc &= -(HOST_WIDE_INT)alignb;
996 offset = large_alloc;
997 large_alloc += stack_vars[i].size;
999 base = large_base;
1000 base_align = large_align;
1003 /* Create rtl for each variable based on their location within the
1004 partition. */
1005 for (j = i; j != EOC; j = stack_vars[j].next)
1007 expand_one_stack_var_at (stack_vars[j].decl,
1008 base, base_align,
1009 offset);
1013 gcc_assert (large_alloc == large_size);
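/* Summary of how this function is driven (see expand_used_vars below):
   it may be called several times with different PRED callbacks --
   stack-protector phases 1 and 2, the asan phase, and finally a NULL
   predicate for everything that is left -- while variables with "large"
   (unsupported) alignment are only laid out in that final,
   predicate-less pass.  */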
1016 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1017 static HOST_WIDE_INT
1018 account_stack_vars (void)
1020 size_t si, j, i, n = stack_vars_num;
1021 HOST_WIDE_INT size = 0;
1023 for (si = 0; si < n; ++si)
1025 i = stack_vars_sorted[si];
1027 /* Skip variables that aren't partition representatives, for now. */
1028 if (stack_vars[i].representative != i)
1029 continue;
1031 size += stack_vars[i].size;
1032 for (j = i; j != EOC; j = stack_vars[j].next)
1033 set_rtl (stack_vars[j].decl, NULL);
1035 return size;
1038 /* A subroutine of expand_one_var. Called to immediately assign rtl
1039 to a variable to be allocated in the stack frame. */
1041 static void
1042 expand_one_stack_var (tree var)
1044 HOST_WIDE_INT size, offset;
1045 unsigned byte_align;
1047 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
1048 byte_align = align_local_variable (SSAVAR (var));
1050 /* We handle highly aligned variables in expand_stack_vars. */
1051 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1053 offset = alloc_stack_frame_space (size, byte_align);
1055 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1056 crtl->max_used_stack_slot_alignment, offset);
1059 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1060 that will reside in a hard register. */
1062 static void
1063 expand_one_hard_reg_var (tree var)
1065 rest_of_decl_compilation (var, 0, 0);
1068 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1069 that will reside in a pseudo register. */
1071 static void
1072 expand_one_register_var (tree var)
1074 tree decl = SSAVAR (var);
1075 tree type = TREE_TYPE (decl);
1076 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1077 rtx x = gen_reg_rtx (reg_mode);
1079 set_rtl (var, x);
1081 /* Note if the object is a user variable. */
1082 if (!DECL_ARTIFICIAL (decl))
1083 mark_user_reg (x);
1085 if (POINTER_TYPE_P (type))
1086 mark_reg_pointer (x, get_pointer_alignment (var));
1089 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1090 has some associated error, e.g. its type is error-mark. We just need
1091 to pick something that won't crash the rest of the compiler. */
1093 static void
1094 expand_one_error_var (tree var)
1096 enum machine_mode mode = DECL_MODE (var);
1097 rtx x;
1099 if (mode == BLKmode)
1100 x = gen_rtx_MEM (BLKmode, const0_rtx);
1101 else if (mode == VOIDmode)
1102 x = const0_rtx;
1103 else
1104 x = gen_reg_rtx (mode);
1106 SET_DECL_RTL (var, x);
1109 /* A subroutine of expand_one_var. VAR is a variable that will be
1110 allocated to the local stack frame. Return true if we wish to
1111 add VAR to STACK_VARS so that it will be coalesced with other
1112 variables. Return false to allocate VAR immediately.
1114 This function is used to reduce the number of variables considered
1115 for coalescing, which reduces the size of the quadratic problem. */
1117 static bool
1118 defer_stack_allocation (tree var, bool toplevel)
1120 /* If stack protection is enabled, *all* stack variables must be deferred,
1121 so that we can re-order the strings to the top of the frame.
1122 Similarly for Address Sanitizer. */
1123 if (flag_stack_protect || (flag_sanitize & SANITIZE_ADDRESS))
1124 return true;
1126 /* We handle "large" alignment via dynamic allocation. We want to handle
1127 this extra complication in only one place, so defer them. */
1128 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1129 return true;
1131 /* Variables in the outermost scope automatically conflict with
1132 every other variable. The only reason to want to defer them
1133 at all is that, after sorting, we can more efficiently pack
1134 small variables in the stack frame. Continue to defer at -O2. */
1135 if (toplevel && optimize < 2)
1136 return false;
1138 /* Without optimization, *most* variables are allocated from the
1139 stack, which makes the quadratic problem large exactly when we
1140 want compilation to proceed as quickly as possible. On the
1141 other hand, we don't want the function's stack frame size to
1142 get completely out of hand. So we avoid adding scalars and
1143 "small" aggregates to the list at all. */
1144 if (optimize == 0
1145 && (tree_low_cst (DECL_SIZE_UNIT (var), 1)
1146 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)))
1147 return false;
1149 return true;
1152 /* A subroutine of expand_used_vars. Expand one variable according to
1153 its flavor. Variables to be placed on the stack are not actually
1154 expanded yet, merely recorded.
1155 When REALLY_EXPAND is false, only add stack values to be allocated.
1156 Return the stack usage this variable is supposed to take. */
1159 static HOST_WIDE_INT
1160 expand_one_var (tree var, bool toplevel, bool really_expand)
1162 unsigned int align = BITS_PER_UNIT;
1163 tree origvar = var;
1165 var = SSAVAR (var);
1167 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1169 /* Because we don't know if VAR will be in a register or on the stack,
1170 we conservatively assume it will be on the stack even if VAR is
1171 eventually put into a register after the RA pass. For non-automatic
1172 variables, which won't be on the stack, we collect the alignment of
1173 the type and ignore any user-specified alignment. */
1174 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1175 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1176 TYPE_MODE (TREE_TYPE (var)),
1177 TYPE_ALIGN (TREE_TYPE (var)));
1178 else if (DECL_HAS_VALUE_EXPR_P (var)
1179 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1180 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1181 or variables which were assigned a stack slot already by
1182 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1183 changed from the offset chosen to it. */
1184 align = crtl->stack_alignment_estimated;
1185 else
1186 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1188 /* If the variable alignment is very large we'll dynamically allocate
1189 it, which means that the in-frame portion is just a pointer. */
1190 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1191 align = POINTER_SIZE;
1194 if (SUPPORTS_STACK_ALIGNMENT
1195 && crtl->stack_alignment_estimated < align)
1197 /* stack_alignment_estimated shouldn't change after stack
1198 realign decision is made. */
1199 gcc_assert (!crtl->stack_realign_processed);
1200 crtl->stack_alignment_estimated = align;
1203 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1204 So here we only make sure stack_alignment_needed >= align. */
1205 if (crtl->stack_alignment_needed < align)
1206 crtl->stack_alignment_needed = align;
1207 if (crtl->max_used_stack_slot_alignment < align)
1208 crtl->max_used_stack_slot_alignment = align;
1210 if (TREE_CODE (origvar) == SSA_NAME)
1212 gcc_assert (TREE_CODE (var) != VAR_DECL
1213 || (!DECL_EXTERNAL (var)
1214 && !DECL_HAS_VALUE_EXPR_P (var)
1215 && !TREE_STATIC (var)
1216 && TREE_TYPE (var) != error_mark_node
1217 && !DECL_HARD_REGISTER (var)
1218 && really_expand));
1220 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1222 else if (DECL_EXTERNAL (var))
1224 else if (DECL_HAS_VALUE_EXPR_P (var))
1226 else if (TREE_STATIC (var))
1228 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1230 else if (TREE_TYPE (var) == error_mark_node)
1232 if (really_expand)
1233 expand_one_error_var (var);
1235 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1237 if (really_expand)
1238 expand_one_hard_reg_var (var);
1240 else if (use_register_for_decl (var))
1242 if (really_expand)
1243 expand_one_register_var (origvar);
1245 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1247 /* Reject variables which cover more than half of the address-space. */
1248 if (really_expand)
1250 error ("size of variable %q+D is too large", var);
1251 expand_one_error_var (var);
1254 else if (defer_stack_allocation (var, toplevel))
1255 add_stack_var (origvar);
1256 else
1258 if (really_expand)
1259 expand_one_stack_var (origvar);
1260 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1262 return 0;
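/* The dispatch above, roughly in order: declarations that are external,
   static, have value-exprs or already have RTL are left alone or handled
   elsewhere; error types and over-large variables get a dummy location;
   hard-register and register-worthy variables get registers; everything
   else either goes through add_stack_var for deferred packing or is
   given a stack slot immediately.  */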
1265 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1266 expanding variables. Those variables that can be put into registers
1267 are allocated pseudos; those that can't are put on the stack.
1269 TOPLEVEL is true if this is the outermost BLOCK. */
1271 static void
1272 expand_used_vars_for_block (tree block, bool toplevel)
1274 tree t;
1276 /* Expand all variables at this level. */
1277 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1278 if (TREE_USED (t)
1279 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1280 || !DECL_NONSHAREABLE (t)))
1281 expand_one_var (t, toplevel, true);
1283 /* Expand all variables at containing levels. */
1284 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1285 expand_used_vars_for_block (t, false);
1288 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1289 and clear TREE_USED on all local variables. */
1291 static void
1292 clear_tree_used (tree block)
1294 tree t;
1296 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1297 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1298 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1299 || !DECL_NONSHAREABLE (t))
1300 TREE_USED (t) = 0;
1302 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1303 clear_tree_used (t);
1306 enum {
1307 SPCT_FLAG_DEFAULT = 1,
1308 SPCT_FLAG_ALL = 2,
1309 SPCT_FLAG_STRONG = 3
1312 /* Examine TYPE and determine a bit mask of the following features. */
1314 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1315 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1316 #define SPCT_HAS_ARRAY 4
1317 #define SPCT_HAS_AGGREGATE 8
1319 static unsigned int
1320 stack_protect_classify_type (tree type)
1322 unsigned int ret = 0;
1323 tree t;
1325 switch (TREE_CODE (type))
1327 case ARRAY_TYPE:
1328 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1329 if (t == char_type_node
1330 || t == signed_char_type_node
1331 || t == unsigned_char_type_node)
1333 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1334 unsigned HOST_WIDE_INT len;
1336 if (!TYPE_SIZE_UNIT (type)
1337 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1338 len = max;
1339 else
1340 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1342 if (len < max)
1343 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1344 else
1345 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1347 else
1348 ret = SPCT_HAS_ARRAY;
1349 break;
1351 case UNION_TYPE:
1352 case QUAL_UNION_TYPE:
1353 case RECORD_TYPE:
1354 ret = SPCT_HAS_AGGREGATE;
1355 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1356 if (TREE_CODE (t) == FIELD_DECL)
1357 ret |= stack_protect_classify_type (TREE_TYPE (t));
1358 break;
1360 default:
1361 break;
1364 return ret;
1367 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1368 part of the local stack frame. Remember if we ever return nonzero for
1369 any variable in this function. The return value is the phase number in
1370 which the variable should be allocated. */
1372 static int
1373 stack_protect_decl_phase (tree decl)
1375 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1376 int ret = 0;
1378 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1379 has_short_buffer = true;
1381 if (flag_stack_protect == SPCT_FLAG_ALL
1382 || flag_stack_protect == SPCT_FLAG_STRONG)
1384 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1385 && !(bits & SPCT_HAS_AGGREGATE))
1386 ret = 1;
1387 else if (bits & SPCT_HAS_ARRAY)
1388 ret = 2;
1390 else
1391 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1393 if (ret)
1394 has_protected_decls = true;
1396 return ret;
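/* For illustration (assuming the default --param ssp-buffer-size=8):
   a char[64] local classifies as SPCT_HAS_LARGE_CHAR_ARRAY and lands in
   phase 1, an int[16] classifies as SPCT_HAS_ARRAY only and lands in
   phase 2 under -fstack-protector-all/-strong, and a plain scalar stays
   in phase 0, i.e. it is not segregated.  */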
1399 /* Two helper routines that check for phase 1 and phase 2. These are used
1400 as callbacks for expand_stack_vars. */
1402 static bool
1403 stack_protect_decl_phase_1 (size_t i)
1405 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1408 static bool
1409 stack_protect_decl_phase_2 (size_t i)
1411 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1414 /* A helper function that checks for the asan phase (with the stack protector
1415 it is phase 3). This is used as a callback for expand_stack_vars.
1416 Returns true if any of the vars in the partition need to be protected. */
1418 static bool
1419 asan_decl_phase_3 (size_t i)
1421 while (i != EOC)
1423 if (asan_protect_stack_decl (stack_vars[i].decl))
1424 return true;
1425 i = stack_vars[i].next;
1427 return false;
1430 /* Ensure that variables in different stack protection phases conflict
1431 so that they are not merged and share the same stack slot. */
1433 static void
1434 add_stack_protection_conflicts (void)
1436 size_t i, j, n = stack_vars_num;
1437 unsigned char *phase;
1439 phase = XNEWVEC (unsigned char, n);
1440 for (i = 0; i < n; ++i)
1441 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1443 for (i = 0; i < n; ++i)
1445 unsigned char ph_i = phase[i];
1446 for (j = i + 1; j < n; ++j)
1447 if (ph_i != phase[j])
1448 add_stack_var_conflict (i, j);
1451 XDELETEVEC (phase);
1454 /* Create a decl for the guard at the top of the stack frame. */
1456 static void
1457 create_stack_guard (void)
1459 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1460 VAR_DECL, NULL, ptr_type_node);
1461 TREE_THIS_VOLATILE (guard) = 1;
1462 TREE_USED (guard) = 1;
1463 expand_one_stack_var (guard);
1464 crtl->stack_protect_guard = guard;
1467 /* Prepare for expanding variables. */
1468 static void
1469 init_vars_expansion (void)
1471 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1472 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1474 /* A map from decl to stack partition. */
1475 decl_to_stack_part = pointer_map_create ();
1477 /* Initialize local stack smashing state. */
1478 has_protected_decls = false;
1479 has_short_buffer = false;
1482 /* Free up stack variable graph data. */
1483 static void
1484 fini_vars_expansion (void)
1486 bitmap_obstack_release (&stack_var_bitmap_obstack);
1487 if (stack_vars)
1488 XDELETEVEC (stack_vars);
1489 if (stack_vars_sorted)
1490 XDELETEVEC (stack_vars_sorted);
1491 stack_vars = NULL;
1492 stack_vars_sorted = NULL;
1493 stack_vars_alloc = stack_vars_num = 0;
1494 pointer_map_destroy (decl_to_stack_part);
1495 decl_to_stack_part = NULL;
1498 /* Make a fair guess for the size of the stack frame of the function
1499 in NODE. This doesn't have to be exact, the result is only used in
1500 the inline heuristics. So we don't want to run the full stack var
1501 packing algorithm (which is quadratic in the number of stack vars).
1502 Instead, we calculate the total size of all stack vars. This turns
1503 out to be a pretty fair estimate -- packing of stack vars doesn't
1504 happen very often. */
1506 HOST_WIDE_INT
1507 estimated_stack_frame_size (struct cgraph_node *node)
1509 HOST_WIDE_INT size = 0;
1510 size_t i;
1511 tree var;
1512 struct function *fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
1514 push_cfun (fn);
1516 init_vars_expansion ();
1518 FOR_EACH_LOCAL_DECL (fn, i, var)
1519 if (auto_var_in_fn_p (var, fn->decl))
1520 size += expand_one_var (var, true, false);
1522 if (stack_vars_num > 0)
1524 /* Fake sorting the stack vars for account_stack_vars (). */
1525 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1526 for (i = 0; i < stack_vars_num; ++i)
1527 stack_vars_sorted[i] = i;
1528 size += account_stack_vars ();
1531 fini_vars_expansion ();
1532 pop_cfun ();
1533 return size;
1536 /* Helper routine to check if a record or union contains an array field. */
1538 static int
1539 record_or_union_type_has_array_p (const_tree tree_type)
1541 tree fields = TYPE_FIELDS (tree_type);
1542 tree f;
1544 for (f = fields; f; f = DECL_CHAIN (f))
1545 if (TREE_CODE (f) == FIELD_DECL)
1547 tree field_type = TREE_TYPE (f);
1548 if (RECORD_OR_UNION_TYPE_P (field_type)
1549 && record_or_union_type_has_array_p (field_type))
1550 return 1;
1551 if (TREE_CODE (field_type) == ARRAY_TYPE)
1552 return 1;
1554 return 0;
1557 /* Expand all variables used in the function. */
1559 static rtx
1560 expand_used_vars (void)
1562 tree var, outer_block = DECL_INITIAL (current_function_decl);
1563 vec<tree> maybe_local_decls = vNULL;
1564 rtx var_end_seq = NULL_RTX;
1565 struct pointer_map_t *ssa_name_decls;
1566 unsigned i;
1567 unsigned len;
1568 bool gen_stack_protect_signal = false;
1570 /* Compute the phase of the stack frame for this function. */
1572 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1573 int off = STARTING_FRAME_OFFSET % align;
1574 frame_phase = off ? align - off : 0;
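/* Worked example (illustrative values only): with PREFERRED_STACK_BOUNDARY
   of 128 bits (align == 16) and STARTING_FRAME_OFFSET == 4, off is 4 and
   frame_phase becomes 12, so that (frame_offset + frame_phase) is a
   multiple of 16, matching the invariant documented at frame_phase's
   declaration.  */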
1577 /* Set TREE_USED on all variables in the local_decls. */
1578 FOR_EACH_LOCAL_DECL (cfun, i, var)
1579 TREE_USED (var) = 1;
1580 /* Clear TREE_USED on all variables associated with a block scope. */
1581 clear_tree_used (DECL_INITIAL (current_function_decl));
1583 init_vars_expansion ();
1585 ssa_name_decls = pointer_map_create ();
1586 for (i = 0; i < SA.map->num_partitions; i++)
1588 tree var = partition_to_var (SA.map, i);
1590 gcc_assert (!virtual_operand_p (var));
1592 /* Assign decls to each SSA name partition, share decls for partitions
1593 we could have coalesced (those with the same type). */
1594 if (SSA_NAME_VAR (var) == NULL_TREE)
1596 void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
1597 if (!*slot)
1598 *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
1599 replace_ssa_name_symbol (var, (tree) *slot);
1602 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1603 expand_one_var (var, true, true);
1604 else
1606 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1607 contain the default def (representing the parm or result itself)
1608 we don't do anything here. But those which don't contain the
1609 default def (representing a temporary based on the parm/result)
1610 we need to allocate space just like for normal VAR_DECLs. */
1611 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1613 expand_one_var (var, true, true);
1614 gcc_assert (SA.partition_to_pseudo[i]);
1618 pointer_map_destroy (ssa_name_decls);
1620 if (flag_stack_protect == SPCT_FLAG_STRONG)
1621 FOR_EACH_LOCAL_DECL (cfun, i, var)
1622 if (!is_global_var (var))
1624 tree var_type = TREE_TYPE (var);
1625 /* Examine local referenced variables that have their addresses taken,
1626 contain an array, or are arrays. */
1627 if (TREE_CODE (var) == VAR_DECL
1628 && (TREE_CODE (var_type) == ARRAY_TYPE
1629 || TREE_ADDRESSABLE (var)
1630 || (RECORD_OR_UNION_TYPE_P (var_type)
1631 && record_or_union_type_has_array_p (var_type))))
1633 gen_stack_protect_signal = true;
1634 break;
1638 /* At this point all variables on the local_decls with TREE_USED
1639 set are not associated with any block scope. Lay them out. */
1641 len = vec_safe_length (cfun->local_decls);
1642 FOR_EACH_LOCAL_DECL (cfun, i, var)
1644 bool expand_now = false;
1646 /* Expanded above already. */
1647 if (is_gimple_reg (var))
1649 TREE_USED (var) = 0;
1650 goto next;
1652 /* We didn't set a block for static or extern because it's hard
1653 to tell the difference between a global variable (re)declared
1654 in a local scope, and one that's really declared there to
1655 begin with. And it doesn't really matter much, since we're
1656 not giving them stack space. Expand them now. */
1657 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1658 expand_now = true;
1660 /* If the variable is not associated with any block, then it
1661 was created by the optimizers, and could be live anywhere
1662 in the function. */
1663 else if (TREE_USED (var))
1664 expand_now = true;
1666 /* Finally, mark all variables on the list as used. We'll use
1667 this in a moment when we expand those associated with scopes. */
1668 TREE_USED (var) = 1;
1670 if (expand_now)
1671 expand_one_var (var, true, true);
1673 next:
1674 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1676 rtx rtl = DECL_RTL_IF_SET (var);
1678 /* Keep artificial non-ignored vars in cfun->local_decls
1679 chain until instantiate_decls. */
1680 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1681 add_local_decl (cfun, var);
1682 else if (rtl == NULL_RTX)
1683 /* If rtl isn't set yet, which can happen e.g. with
1684 -fstack-protector, retry before returning from this
1685 function. */
1686 maybe_local_decls.safe_push (var);
1690 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1692 +-----------------+-----------------+
1693 | ...processed... | ...duplicates...|
1694 +-----------------+-----------------+
1696 +-- LEN points here.
1698 We just want the duplicates, as those are the artificial
1699 non-ignored vars that we want to keep until instantiate_decls.
1700 Move them down and truncate the array. */
1701 if (!vec_safe_is_empty (cfun->local_decls))
1702 cfun->local_decls->block_remove (0, len);
1704 /* At this point, all variables within the block tree with TREE_USED
1705 set are actually used by the optimized function. Lay them out. */
1706 expand_used_vars_for_block (outer_block, true);
1708 if (stack_vars_num > 0)
1710 add_scope_conflicts ();
1712 /* If stack protection is enabled, we don't share space between
1713 vulnerable data and non-vulnerable data. */
1714 if (flag_stack_protect)
1715 add_stack_protection_conflicts ();
1717 /* Now that we have collected all stack variables, and have computed a
1718 minimal interference graph, attempt to save some stack space. */
1719 partition_stack_vars ();
1720 if (dump_file)
1721 dump_stack_var_partition ();
1724 switch (flag_stack_protect)
1726 case SPCT_FLAG_ALL:
1727 create_stack_guard ();
1728 break;
1730 case SPCT_FLAG_STRONG:
1731 if (gen_stack_protect_signal
1732 || cfun->calls_alloca || has_protected_decls)
1733 create_stack_guard ();
1734 break;
1736 case SPCT_FLAG_DEFAULT:
1737 if (cfun->calls_alloca || has_protected_decls)
1738 create_stack_guard ();
1739 break;
1741 default:
1745 /* Assign rtl to each variable based on these partitions. */
1746 if (stack_vars_num > 0)
1748 struct stack_vars_data data;
1750 data.asan_vec = vNULL;
1751 data.asan_decl_vec = vNULL;
1753 /* Reorder decls to be protected by iterating over the variables
1754 array multiple times, and allocating out of each phase in turn. */
1755 /* ??? We could probably integrate this into the qsort we did
1756 earlier, such that we naturally see these variables first,
1757 and thus naturally allocate things in the right order. */
1758 if (has_protected_decls)
1760 /* Phase 1 contains only character arrays. */
1761 expand_stack_vars (stack_protect_decl_phase_1, &data);
1763 /* Phase 2 contains other kinds of arrays. */
1764 if (flag_stack_protect == 2)
1765 expand_stack_vars (stack_protect_decl_phase_2, &data);
1768 if (flag_sanitize & SANITIZE_ADDRESS)
1769 /* Phase 3, any partitions that need asan protection
1770 in addition to phase 1 and 2. */
1771 expand_stack_vars (asan_decl_phase_3, &data);
1773 if (!data.asan_vec.is_empty ())
1775 HOST_WIDE_INT prev_offset = frame_offset;
1776 HOST_WIDE_INT offset
1777 = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
1778 ASAN_RED_ZONE_SIZE);
1779 data.asan_vec.safe_push (prev_offset);
1780 data.asan_vec.safe_push (offset);
1782 var_end_seq
1783 = asan_emit_stack_protection (virtual_stack_vars_rtx,
1784 data.asan_vec.address (),
1785 data.asan_decl_vec.address (),
1786 data.asan_vec.length ());
1789 expand_stack_vars (NULL, &data);
1791 data.asan_vec.release ();
1792 data.asan_decl_vec.release ();
1795 fini_vars_expansion ();
1797 /* If there were any artificial non-ignored vars without rtl
1798 found earlier, see if deferred stack allocation hasn't assigned
1799 rtl to them. */
1800 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
1802 rtx rtl = DECL_RTL_IF_SET (var);
1804 /* Keep artificial non-ignored vars in cfun->local_decls
1805 chain until instantiate_decls. */
1806 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1807 add_local_decl (cfun, var);
1809 maybe_local_decls.release ();
1811 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1812 if (STACK_ALIGNMENT_NEEDED)
1814 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1815 if (!FRAME_GROWS_DOWNWARD)
1816 frame_offset += align - 1;
1817 frame_offset &= -align;
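/* For example (illustrative only): with a 16-byte preferred boundary and
   a downward-growing frame at frame_offset == -36, the masking step
   rounds the offset down to -48, so the final frame_offset ends on an
   align boundary.  */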
1820 return var_end_seq;
1824 /* If we need to produce a detailed dump, print the tree representation
1825 for STMT to the dump file. SINCE is the last RTX after which the RTL
1826 generated for STMT should have been appended. */
1828 static void
1829 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1831 if (dump_file && (dump_flags & TDF_DETAILS))
1833 fprintf (dump_file, "\n;; ");
1834 print_gimple_stmt (dump_file, stmt, 0,
1835 TDF_SLIM | (dump_flags & TDF_LINENO));
1836 fprintf (dump_file, "\n");
1838 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1842 /* Maps the blocks that do not contain tree labels to rtx labels. */
1844 static struct pointer_map_t *lab_rtx_for_bb;
1846 /* Returns the label_rtx expression for a label starting basic block BB. */
1848 static rtx
1849 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1851 gimple_stmt_iterator gsi;
1852 tree lab;
1853 gimple lab_stmt;
1854 void **elt;
1856 if (bb->flags & BB_RTL)
1857 return block_label (bb);
1859 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1860 if (elt)
1861 return (rtx) *elt;
1863 /* Find the tree label if it is present. */
1865 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1867 lab_stmt = gsi_stmt (gsi);
1868 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1869 break;
1871 lab = gimple_label_label (lab_stmt);
1872 if (DECL_NONLOCAL (lab))
1873 break;
1875 return label_rtx (lab);
1878 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1879 *elt = gen_label_rtx ();
1880 return (rtx) *elt;
1884 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1885 of a basic block where we just expanded the conditional at the end,
1886 possibly clean up the CFG and instruction sequence. LAST is the
1887 last instruction before the just emitted jump sequence. */
1889 static void
1890 maybe_cleanup_end_of_block (edge e, rtx last)
1892 /* Special case: when jumpif decides that the condition is
1893 trivial it emits an unconditional jump (and the necessary
1894 barrier). But we still have two edges, the fallthru one is
1895 wrong. purge_dead_edges would clean this up later. Unfortunately
1896 we have to insert insns (and split edges) before
1897 find_many_sub_basic_blocks and hence before purge_dead_edges.
1898 But splitting edges might create new blocks which depend on the
1899 fact that if there are two edges there's no barrier. So the
1900 barrier would get lost and verify_flow_info would ICE. Instead
1901 of auditing all edge splitters to care for the barrier (which
1902 normally isn't there in a cleaned CFG), fix it here. */
1903 if (BARRIER_P (get_last_insn ()))
1905 rtx insn;
1906 remove_edge (e);
1907 /* Now we have a single successor block; if we have insns to
1908 insert on the remaining edge, we will potentially insert
1909 them at the end of this block (if the dest block isn't feasible)
1910 in order to avoid splitting the edge. This insertion will take
1911 place in front of the last jump. But we might have emitted
1912 multiple jumps (conditional and one unconditional) to the
1913 same destination. Inserting in front of the last one then
1914 is a problem. See PR 40021. We fix this by deleting all
1915 jumps except the last unconditional one. */
1916 insn = PREV_INSN (get_last_insn ());
1917 /* Make sure we have an unconditional jump. Otherwise we're
1918 confused. */
1919 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1920 for (insn = PREV_INSN (insn); insn != last;)
1922 insn = PREV_INSN (insn);
1923 if (JUMP_P (NEXT_INSN (insn)))
1925 if (!any_condjump_p (NEXT_INSN (insn)))
1927 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1928 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1930 delete_insn (NEXT_INSN (insn));
1936 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1937 Returns a new basic block if we've terminated the current basic
1938 block and created a new one. */
1940 static basic_block
1941 expand_gimple_cond (basic_block bb, gimple stmt)
1943 basic_block new_bb, dest;
1944 edge new_edge;
1945 edge true_edge;
1946 edge false_edge;
1947 rtx last2, last;
1948 enum tree_code code;
1949 tree op0, op1;
1951 code = gimple_cond_code (stmt);
1952 op0 = gimple_cond_lhs (stmt);
1953 op1 = gimple_cond_rhs (stmt);
1954 /* We're sometimes presented with such code:
1955 D.123_1 = x < y;
1956 if (D.123_1 != 0)
1958 This would expand to two comparisons which then later might
1959 be cleaned up by combine. But some pattern matchers like if-conversion
1960 work better when there's only one compare, so make up for this
1961 here as a special exception if TER would have made the same change. */
1962 if (SA.values
1963 && TREE_CODE (op0) == SSA_NAME
1964 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
1965 && TREE_CODE (op1) == INTEGER_CST
1966 && ((gimple_cond_code (stmt) == NE_EXPR
1967 && integer_zerop (op1))
1968 || (gimple_cond_code (stmt) == EQ_EXPR
1969 && integer_onep (op1)))
1970 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1972 gimple second = SSA_NAME_DEF_STMT (op0);
1973 if (gimple_code (second) == GIMPLE_ASSIGN)
1975 enum tree_code code2 = gimple_assign_rhs_code (second);
1976 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1978 code = code2;
1979 op0 = gimple_assign_rhs1 (second);
1980 op1 = gimple_assign_rhs2 (second);
1982 /* If jumps are cheap, turn some more codes into
1983 jumpy sequences. */
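/* For instance, a boolean tem_2 = a_1 & b_1; if (tem_2 != 0), with b_1
not a constant, is treated as TRUTH_ANDIF_EXPR below, so it expands
into two conditional jumps instead of an AND feeding a separate
compare-and-branch.  */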
1984 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1986 if ((code2 == BIT_AND_EXPR
1987 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1988 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1989 || code2 == TRUTH_AND_EXPR)
1991 code = TRUTH_ANDIF_EXPR;
1992 op0 = gimple_assign_rhs1 (second);
1993 op1 = gimple_assign_rhs2 (second);
1995 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1997 code = TRUTH_ORIF_EXPR;
1998 op0 = gimple_assign_rhs1 (second);
1999 op1 = gimple_assign_rhs2 (second);
2005 last2 = last = get_last_insn ();
2007 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2008 set_curr_insn_location (gimple_location (stmt));
2010 /* These flags have no purpose in RTL land. */
2011 true_edge->flags &= ~EDGE_TRUE_VALUE;
2012 false_edge->flags &= ~EDGE_FALSE_VALUE;
2014 /* We can either have a pure conditional jump with one fallthru edge or
2015 a two-way jump that needs to be decomposed into two basic blocks. */
2016 if (false_edge->dest == bb->next_bb)
2018 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2019 true_edge->probability);
2020 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2021 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2022 set_curr_insn_location (true_edge->goto_locus);
2023 false_edge->flags |= EDGE_FALLTHRU;
2024 maybe_cleanup_end_of_block (false_edge, last);
2025 return NULL;
2027 if (true_edge->dest == bb->next_bb)
2029 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2030 false_edge->probability);
2031 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2032 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2033 set_curr_insn_location (false_edge->goto_locus);
2034 true_edge->flags |= EDGE_FALLTHRU;
2035 maybe_cleanup_end_of_block (true_edge, last);
2036 return NULL;
2039 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2040 true_edge->probability);
2041 last = get_last_insn ();
2042 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2043 set_curr_insn_location (false_edge->goto_locus);
2044 emit_jump (label_rtx_for_bb (false_edge->dest));
2046 BB_END (bb) = last;
2047 if (BARRIER_P (BB_END (bb)))
2048 BB_END (bb) = PREV_INSN (BB_END (bb));
2049 update_bb_for_insn (bb);
2051 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2052 dest = false_edge->dest;
2053 redirect_edge_succ (false_edge, new_bb);
2054 false_edge->flags |= EDGE_FALLTHRU;
2055 new_bb->count = false_edge->count;
2056 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2057 if (current_loops && bb->loop_father)
2058 add_bb_to_loop (new_bb, bb->loop_father);
2059 new_edge = make_edge (new_bb, dest, 0);
2060 new_edge->probability = REG_BR_PROB_BASE;
2061 new_edge->count = new_bb->count;
2062 if (BARRIER_P (BB_END (new_bb)))
2063 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2064 update_bb_for_insn (new_bb);
2066 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2068 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2070 set_curr_insn_location (true_edge->goto_locus);
2071 true_edge->goto_locus = curr_insn_location ();
2074 return new_bb;
2077 /* Mark all calls that can have a transaction restart. */
2079 static void
2080 mark_transaction_restart_calls (gimple stmt)
2082 struct tm_restart_node dummy;
2083 void **slot;
2085 if (!cfun->gimple_df->tm_restart)
2086 return;
2088 dummy.stmt = stmt;
2089 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2090 if (slot)
2092 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2093 tree list = n->label_or_list;
2094 rtx insn;
2096 for (insn = next_real_insn (get_last_insn ());
2097 !CALL_P (insn);
2098 insn = next_real_insn (insn))
2099 continue;
2101 if (TREE_CODE (list) == LABEL_DECL)
2102 add_reg_note (insn, REG_TM, label_rtx (list));
2103 else
2104 for (; list ; list = TREE_CHAIN (list))
2105 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2109 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2110 statement STMT. */
2112 static void
2113 expand_call_stmt (gimple stmt)
2115 tree exp, decl, lhs;
2116 bool builtin_p;
2117 size_t i;
2119 if (gimple_call_internal_p (stmt))
2121 expand_internal_call (stmt);
2122 return;
2125 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2127 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2128 decl = gimple_call_fndecl (stmt);
2129 builtin_p = decl && DECL_BUILT_IN (decl);
2131 /* If this is not a builtin function, the function type through which the
2132 call is made may be different from the type of the function. */
2133 if (!builtin_p)
2134 CALL_EXPR_FN (exp)
2135 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2136 CALL_EXPR_FN (exp));
2138 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2139 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2141 for (i = 0; i < gimple_call_num_args (stmt); i++)
2143 tree arg = gimple_call_arg (stmt, i);
2144 gimple def;
2145 /* TER substitutes addresses into arguments of builtin functions so we
2146 have a chance to infer more correct alignment information. See PR39954. */
2147 if (builtin_p
2148 && TREE_CODE (arg) == SSA_NAME
2149 && (def = get_gimple_for_ssa_name (arg))
2150 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2151 arg = gimple_assign_rhs1 (def);
2152 CALL_EXPR_ARG (exp, i) = arg;
2155 if (gimple_has_side_effects (stmt))
2156 TREE_SIDE_EFFECTS (exp) = 1;
2158 if (gimple_call_nothrow_p (stmt))
2159 TREE_NOTHROW (exp) = 1;
2161 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2162 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2163 if (decl
2164 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2165 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2166 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2167 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2168 else
2169 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2170 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2171 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2173 /* Ensure RTL is created for debug args. */
2174 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2176 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2177 unsigned int ix;
2178 tree dtemp;
2180 if (debug_args)
2181 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2183 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2184 expand_debug_expr (dtemp);
2188 lhs = gimple_call_lhs (stmt);
2189 if (lhs)
2190 expand_assignment (lhs, exp, false);
2191 else
2192 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2194 mark_transaction_restart_calls (stmt);
2197 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2198 STMT that doesn't require special handling for outgoing edges. That
2199 is, no tailcalls and no GIMPLE_COND. */
2201 static void
2202 expand_gimple_stmt_1 (gimple stmt)
2204 tree op0;
2206 set_curr_insn_location (gimple_location (stmt));
2208 switch (gimple_code (stmt))
2210 case GIMPLE_GOTO:
2211 op0 = gimple_goto_dest (stmt);
2212 if (TREE_CODE (op0) == LABEL_DECL)
2213 expand_goto (op0);
2214 else
2215 expand_computed_goto (op0);
2216 break;
2217 case GIMPLE_LABEL:
2218 expand_label (gimple_label_label (stmt));
2219 break;
2220 case GIMPLE_NOP:
2221 case GIMPLE_PREDICT:
2222 break;
2223 case GIMPLE_SWITCH:
2224 expand_case (stmt);
2225 break;
2226 case GIMPLE_ASM:
2227 expand_asm_stmt (stmt);
2228 break;
2229 case GIMPLE_CALL:
2230 expand_call_stmt (stmt);
2231 break;
2233 case GIMPLE_RETURN:
2234 op0 = gimple_return_retval (stmt);
2236 if (op0 && op0 != error_mark_node)
2238 tree result = DECL_RESULT (current_function_decl);
2240 /* If we are not returning the current function's RESULT_DECL,
2241 build an assignment to it. */
2242 if (op0 != result)
2244 /* I believe that a function's RESULT_DECL is unique. */
2245 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2247 /* ??? We'd like to use simply expand_assignment here,
2248 but this fails if the value is of BLKmode but the return
2249 decl is a register. expand_return has special handling
2250 for this combination, which eventually should move
2251 to common code. See comments there. Until then, let's
2252 build a modify expression :-/ */
2253 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2254 result, op0);
2257 if (!op0)
2258 expand_null_return ();
2259 else
2260 expand_return (op0);
2261 break;
2263 case GIMPLE_ASSIGN:
2265 tree lhs = gimple_assign_lhs (stmt);
2267 /* Tree expand used to fiddle with |= and &= of two bitfield
2268 COMPONENT_REFs here. This can't happen with gimple; the LHS
2269 of binary assigns must be a gimple reg. */
2271 if (TREE_CODE (lhs) != SSA_NAME
2272 || get_gimple_rhs_class (gimple_expr_code (stmt))
2273 == GIMPLE_SINGLE_RHS)
2275 tree rhs = gimple_assign_rhs1 (stmt);
2276 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2277 == GIMPLE_SINGLE_RHS);
2278 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2279 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2280 if (TREE_CLOBBER_P (rhs))
2281 /* This is a clobber marking that this LHS is going
2282 out of scope. */
2284 else
2285 expand_assignment (lhs, rhs,
2286 gimple_assign_nontemporal_move_p (stmt));
2288 else
2290 rtx target, temp;
2291 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2292 struct separate_ops ops;
2293 bool promoted = false;
2295 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2296 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2297 promoted = true;
2299 ops.code = gimple_assign_rhs_code (stmt);
2300 ops.type = TREE_TYPE (lhs);
2301 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2303 case GIMPLE_TERNARY_RHS:
2304 ops.op2 = gimple_assign_rhs3 (stmt);
2305 /* Fallthru */
2306 case GIMPLE_BINARY_RHS:
2307 ops.op1 = gimple_assign_rhs2 (stmt);
2308 /* Fallthru */
2309 case GIMPLE_UNARY_RHS:
2310 ops.op0 = gimple_assign_rhs1 (stmt);
2311 break;
2312 default:
2313 gcc_unreachable ();
2315 ops.location = gimple_location (stmt);
2317 /* If we want to use a nontemporal store, force the value into a
2318 register first. If we store into a promoted register,
2319 don't directly expand to target. */
2320 temp = nontemporal || promoted ? NULL_RTX : target;
2321 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2322 EXPAND_NORMAL);
2324 if (temp == target)
2326 else if (promoted)
2328 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2329 /* If TEMP is a VOIDmode constant, use convert_modes to make
2330 sure that we properly convert it. */
2331 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2333 temp = convert_modes (GET_MODE (target),
2334 TYPE_MODE (ops.type),
2335 temp, unsignedp);
2336 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2337 GET_MODE (target), temp, unsignedp);
2340 convert_move (SUBREG_REG (target), temp, unsignedp);
2342 else if (nontemporal && emit_storent_insn (target, temp))
2344 else
2346 temp = force_operand (temp, target);
2347 if (temp != target)
2348 emit_move_insn (target, temp);
2352 break;
2354 default:
2355 gcc_unreachable ();
2359 /* Expand one gimple statement STMT and return the last RTL instruction
2360 before any of the newly generated ones.
2362 In addition to generating the necessary RTL instructions this also
2363 sets REG_EH_REGION notes if necessary and sets the current source
2364 location for diagnostics. */
2366 static rtx
2367 expand_gimple_stmt (gimple stmt)
2369 location_t saved_location = input_location;
2370 rtx last = get_last_insn ();
2371 int lp_nr;
2373 gcc_assert (cfun);
2375 /* We need to save and restore the current source location so that errors
2376 discovered during expansion are emitted with the right location. But
2377 it would be better if the diagnostic routines used the source location
2378 embedded in the tree nodes rather than globals. */
2379 if (gimple_has_location (stmt))
2380 input_location = gimple_location (stmt);
2382 expand_gimple_stmt_1 (stmt);
2384 /* Free any temporaries used to evaluate this statement. */
2385 free_temp_slots ();
2387 input_location = saved_location;
2389 /* Mark all insns that may trap. */
2390 lp_nr = lookup_stmt_eh_lp (stmt);
2391 if (lp_nr)
2393 rtx insn;
2394 for (insn = next_real_insn (last); insn;
2395 insn = next_real_insn (insn))
2397 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2398 /* If we want exceptions for non-call insns, any
2399 may_trap_p instruction may throw. */
2400 && GET_CODE (PATTERN (insn)) != CLOBBER
2401 && GET_CODE (PATTERN (insn)) != USE
2402 && insn_could_throw_p (insn))
2403 make_reg_eh_region_note (insn, 0, lp_nr);
2407 return last;
2410 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2411 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2412 generated a tail call (something that might be denied by the ABI
2413 rules governing the call; see calls.c).
2415 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2416 can still reach the rest of BB. The case here is __builtin_sqrt,
2417 where the NaN result goes through the external function (with a
2418 tailcall) and the normal result happens via a sqrt instruction. */
2420 static basic_block
2421 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2423 rtx last2, last;
2424 edge e;
2425 edge_iterator ei;
2426 int probability;
2427 gcov_type count;
2429 last2 = last = expand_gimple_stmt (stmt);
2431 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2432 if (CALL_P (last) && SIBLING_CALL_P (last))
2433 goto found;
2435 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2437 *can_fallthru = true;
2438 return NULL;
2440 found:
2441 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2442 Any instructions emitted here are about to be deleted. */
2443 do_pending_stack_adjust ();
2445 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2446 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2447 EH or abnormal edges, we shouldn't have created a tail call in
2448 the first place. So it seems to me we should just be removing
2449 all edges here, or redirecting the existing fallthru edge to
2450 the exit block. */
2452 probability = 0;
2453 count = 0;
2455 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2457 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2459 if (e->dest != EXIT_BLOCK_PTR)
2461 e->dest->count -= e->count;
2462 e->dest->frequency -= EDGE_FREQUENCY (e);
2463 if (e->dest->count < 0)
2464 e->dest->count = 0;
2465 if (e->dest->frequency < 0)
2466 e->dest->frequency = 0;
2468 count += e->count;
2469 probability += e->probability;
2470 remove_edge (e);
2472 else
2473 ei_next (&ei);
2476 /* This is somewhat ugly: the call_expr expander often emits instructions
2477 after the sibcall (to perform the function return). These confuse the
2478 find_many_sub_basic_blocks code, so we need to get rid of them. */
2479 last = NEXT_INSN (last);
2480 gcc_assert (BARRIER_P (last));
2482 *can_fallthru = false;
2483 while (NEXT_INSN (last))
2485 /* For instance, the sqrt builtin expander expands an if with a
2486 sibcall in the `then` arm and a label for the `else` arm. */
2487 if (LABEL_P (NEXT_INSN (last)))
2489 *can_fallthru = true;
2490 break;
2492 delete_insn (NEXT_INSN (last));
2495 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2496 e->probability += probability;
2497 e->count += count;
2498 BB_END (bb) = last;
2499 update_bb_for_insn (bb);
2501 if (NEXT_INSN (last))
2503 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2505 last = BB_END (bb);
2506 if (BARRIER_P (last))
2507 BB_END (bb) = PREV_INSN (last);
2510 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2512 return bb;
2515 /* Return the difference between the floor and the truncated result of
2516 a signed division by OP1 with remainder MOD. */
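/* For instance, for a dividend of -7 and OP1 = 2 the truncated quotient
is -3 with MOD = -1; the floor result is -4, so OP1 / MOD = -2 is
negative and the adjustment below is -1.  When the signs agree (say 7
and 2, so MOD = 1), OP1 / MOD is positive and the adjustment is 0.  */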
2517 static rtx
2518 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2520 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2521 return gen_rtx_IF_THEN_ELSE
2522 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2523 gen_rtx_IF_THEN_ELSE
2524 (mode, gen_rtx_LT (BImode,
2525 gen_rtx_DIV (mode, op1, mod),
2526 const0_rtx),
2527 constm1_rtx, const0_rtx),
2528 const0_rtx);
2531 /* Return the difference between the ceil and the truncated result of
2532 a signed division by OP1 with remainder MOD. */
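/* For instance, for a dividend of 7 and OP1 = 2 the truncated quotient
is 3 with MOD = 1; the ceiling result is 4, so OP1 / MOD = 2 is
positive and the adjustment below is +1.  For -7 and 2 (MOD = -1) the
ceiling equals the truncated quotient and the adjustment is 0.  */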
2533 static rtx
2534 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2536 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2537 return gen_rtx_IF_THEN_ELSE
2538 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2539 gen_rtx_IF_THEN_ELSE
2540 (mode, gen_rtx_GT (BImode,
2541 gen_rtx_DIV (mode, op1, mod),
2542 const0_rtx),
2543 const1_rtx, const0_rtx),
2544 const0_rtx);
2547 /* Return the difference between the ceil and the truncated result of
2548 an unsigned division by OP1 with remainder MOD. */
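/* For instance, 7 / 2 truncates to 3 with MOD = 1; MOD is nonzero, so
the adjustment below is +1 and the ceiling result is 4.  */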
2549 static rtx
2550 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2552 /* (mod != 0 ? 1 : 0) */
2553 return gen_rtx_IF_THEN_ELSE
2554 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2555 const1_rtx, const0_rtx);
2558 /* Return the difference between the rounded and the truncated result
2559 of a signed division by OP1 with remainder MOD. Halfway cases are
2560 rounded away from zero, rather than to the nearest even number. */
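/* For instance, for a dividend of -7 and OP1 = 2 the truncated quotient
is -3 with MOD = -1; abs (MOD) = 1 equals abs (OP1) - abs (MOD) = 1,
so this halfway case is adjusted by -1, giving the rounded result -4.
For a dividend of 7 and OP1 = 3 (MOD = 1 < 3 - 1) no adjustment is
made and the result stays 2.  */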
2561 static rtx
2562 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2564 /* (abs (mod) >= abs (op1) - abs (mod)
2565 ? (op1 / mod > 0 ? 1 : -1)
2566 : 0) */
2567 return gen_rtx_IF_THEN_ELSE
2568 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2569 gen_rtx_MINUS (mode,
2570 gen_rtx_ABS (mode, op1),
2571 gen_rtx_ABS (mode, mod))),
2572 gen_rtx_IF_THEN_ELSE
2573 (mode, gen_rtx_GT (BImode,
2574 gen_rtx_DIV (mode, op1, mod),
2575 const0_rtx),
2576 const1_rtx, constm1_rtx),
2577 const0_rtx);
2580 /* Return the difference between the rounded and the truncated result
2581 of an unsigned division by OP1 with remainder MOD. Halfway cases
2582 are rounded away from zero, rather than to the nearest even
2583 number. */
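/* For instance, 7 / 2 truncates to 3 with MOD = 1; MOD >= OP1 - MOD
(1 >= 1), so the adjustment below is +1 and the rounded result is 4,
whereas 7 / 3 (MOD = 1 < 2) is left at the truncated result 2.  */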
2584 static rtx
2585 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2587 /* (mod >= op1 - mod ? 1 : 0) */
2588 return gen_rtx_IF_THEN_ELSE
2589 (mode, gen_rtx_GE (BImode, mod,
2590 gen_rtx_MINUS (mode, op1, mod)),
2591 const1_rtx, const0_rtx);
2594 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2595 any rtl. */
2597 static rtx
2598 convert_debug_memory_address (enum machine_mode mode, rtx x,
2599 addr_space_t as)
2601 enum machine_mode xmode = GET_MODE (x);
2603 #ifndef POINTERS_EXTEND_UNSIGNED
2604 gcc_assert (mode == Pmode
2605 || mode == targetm.addr_space.address_mode (as));
2606 gcc_assert (xmode == mode || xmode == VOIDmode);
2607 #else
2608 rtx temp;
2610 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2612 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2613 return x;
2615 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2616 x = simplify_gen_subreg (mode, x, xmode,
2617 subreg_lowpart_offset
2618 (mode, xmode));
2619 else if (POINTERS_EXTEND_UNSIGNED > 0)
2620 x = gen_rtx_ZERO_EXTEND (mode, x);
2621 else if (!POINTERS_EXTEND_UNSIGNED)
2622 x = gen_rtx_SIGN_EXTEND (mode, x);
2623 else
2625 switch (GET_CODE (x))
2627 case SUBREG:
2628 if ((SUBREG_PROMOTED_VAR_P (x)
2629 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2630 || (GET_CODE (SUBREG_REG (x)) == PLUS
2631 && REG_P (XEXP (SUBREG_REG (x), 0))
2632 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2633 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2634 && GET_MODE (SUBREG_REG (x)) == mode)
2635 return SUBREG_REG (x);
2636 break;
2637 case LABEL_REF:
2638 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2639 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2640 return temp;
2641 case SYMBOL_REF:
2642 temp = shallow_copy_rtx (x);
2643 PUT_MODE (temp, mode);
2644 return temp;
2645 case CONST:
2646 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2647 if (temp)
2648 temp = gen_rtx_CONST (mode, temp);
2649 return temp;
2650 case PLUS:
2651 case MINUS:
2652 if (CONST_INT_P (XEXP (x, 1)))
2654 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2655 if (temp)
2656 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2658 break;
2659 default:
2660 break;
2662 /* Don't know how to express ptr_extend as an operation in debug info. */
2663 return NULL;
2665 #endif /* POINTERS_EXTEND_UNSIGNED */
2667 return x;
2670 /* Return an RTX equivalent to the value of the parameter DECL. */
2672 static rtx
2673 expand_debug_parm_decl (tree decl)
2675 rtx incoming = DECL_INCOMING_RTL (decl);
2677 if (incoming
2678 && GET_MODE (incoming) != BLKmode
2679 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2680 || (MEM_P (incoming)
2681 && REG_P (XEXP (incoming, 0))
2682 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2684 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2686 #ifdef HAVE_window_save
2687 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2688 If the target machine has an explicit window save instruction, the
2689 actual entry value is the corresponding OUTGOING_REGNO instead. */
2690 if (REG_P (incoming)
2691 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2692 incoming
2693 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2694 OUTGOING_REGNO (REGNO (incoming)), 0);
2695 else if (MEM_P (incoming))
2697 rtx reg = XEXP (incoming, 0);
2698 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2700 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2701 incoming = replace_equiv_address_nv (incoming, reg);
2703 else
2704 incoming = copy_rtx (incoming);
2706 #endif
2708 ENTRY_VALUE_EXP (rtl) = incoming;
2709 return rtl;
2712 if (incoming
2713 && GET_MODE (incoming) != BLKmode
2714 && !TREE_ADDRESSABLE (decl)
2715 && MEM_P (incoming)
2716 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2717 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2718 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2719 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2720 return copy_rtx (incoming);
2722 return NULL_RTX;
2725 /* Return an RTX equivalent to the value of the tree expression EXP. */
2727 static rtx
2728 expand_debug_expr (tree exp)
2730 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2731 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2732 enum machine_mode inner_mode = VOIDmode;
2733 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2734 addr_space_t as;
2736 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2738 case tcc_expression:
2739 switch (TREE_CODE (exp))
2741 case COND_EXPR:
2742 case DOT_PROD_EXPR:
2743 case WIDEN_MULT_PLUS_EXPR:
2744 case WIDEN_MULT_MINUS_EXPR:
2745 case FMA_EXPR:
2746 goto ternary;
2748 case TRUTH_ANDIF_EXPR:
2749 case TRUTH_ORIF_EXPR:
2750 case TRUTH_AND_EXPR:
2751 case TRUTH_OR_EXPR:
2752 case TRUTH_XOR_EXPR:
2753 goto binary;
2755 case TRUTH_NOT_EXPR:
2756 goto unary;
2758 default:
2759 break;
2761 break;
2763 ternary:
2764 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2765 if (!op2)
2766 return NULL_RTX;
2767 /* Fall through. */
2769 binary:
2770 case tcc_binary:
2771 case tcc_comparison:
2772 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2773 if (!op1)
2774 return NULL_RTX;
2775 /* Fall through. */
2777 unary:
2778 case tcc_unary:
2779 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2780 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2781 if (!op0)
2782 return NULL_RTX;
2783 break;
2785 case tcc_type:
2786 case tcc_statement:
2787 gcc_unreachable ();
2789 case tcc_constant:
2790 case tcc_exceptional:
2791 case tcc_declaration:
2792 case tcc_reference:
2793 case tcc_vl_exp:
2794 break;
2797 switch (TREE_CODE (exp))
2799 case STRING_CST:
2800 if (!lookup_constant_def (exp))
2802 if (strlen (TREE_STRING_POINTER (exp)) + 1
2803 != (size_t) TREE_STRING_LENGTH (exp))
2804 return NULL_RTX;
2805 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2806 op0 = gen_rtx_MEM (BLKmode, op0);
2807 set_mem_attributes (op0, exp, 0);
2808 return op0;
2810 /* Fall through... */
2812 case INTEGER_CST:
2813 case REAL_CST:
2814 case FIXED_CST:
2815 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2816 return op0;
2818 case COMPLEX_CST:
2819 gcc_assert (COMPLEX_MODE_P (mode));
2820 op0 = expand_debug_expr (TREE_REALPART (exp));
2821 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2822 return gen_rtx_CONCAT (mode, op0, op1);
2824 case DEBUG_EXPR_DECL:
2825 op0 = DECL_RTL_IF_SET (exp);
2827 if (op0)
2828 return op0;
2830 op0 = gen_rtx_DEBUG_EXPR (mode);
2831 DEBUG_EXPR_TREE_DECL (op0) = exp;
2832 SET_DECL_RTL (exp, op0);
2834 return op0;
2836 case VAR_DECL:
2837 case PARM_DECL:
2838 case FUNCTION_DECL:
2839 case LABEL_DECL:
2840 case CONST_DECL:
2841 case RESULT_DECL:
2842 op0 = DECL_RTL_IF_SET (exp);
2844 /* This decl was probably optimized away. */
2845 if (!op0)
2847 if (TREE_CODE (exp) != VAR_DECL
2848 || DECL_EXTERNAL (exp)
2849 || !TREE_STATIC (exp)
2850 || !DECL_NAME (exp)
2851 || DECL_HARD_REGISTER (exp)
2852 || DECL_IN_CONSTANT_POOL (exp)
2853 || mode == VOIDmode)
2854 return NULL;
2856 op0 = make_decl_rtl_for_debug (exp);
2857 if (!MEM_P (op0)
2858 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2859 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2860 return NULL;
2862 else
2863 op0 = copy_rtx (op0);
2865 if (GET_MODE (op0) == BLKmode
2866 /* If op0 is not BLKmode, but mode is BLKmode, adjust_mode
2867 below would ICE. While it is likely a FE bug,
2868 try to be robust here. See PR43166. */
2869 || mode == BLKmode
2870 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2872 gcc_assert (MEM_P (op0));
2873 op0 = adjust_address_nv (op0, mode, 0);
2874 return op0;
2877 /* Fall through. */
2879 adjust_mode:
2880 case PAREN_EXPR:
2881 case NOP_EXPR:
2882 case CONVERT_EXPR:
2884 inner_mode = GET_MODE (op0);
2886 if (mode == inner_mode)
2887 return op0;
2889 if (inner_mode == VOIDmode)
2891 if (TREE_CODE (exp) == SSA_NAME)
2892 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2893 else
2894 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2895 if (mode == inner_mode)
2896 return op0;
2899 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2901 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2902 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2903 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2904 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2905 else
2906 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2908 else if (FLOAT_MODE_P (mode))
2910 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2911 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2912 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2913 else
2914 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2916 else if (FLOAT_MODE_P (inner_mode))
2918 if (unsignedp)
2919 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2920 else
2921 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2923 else if (CONSTANT_P (op0)
2924 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2925 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2926 subreg_lowpart_offset (mode,
2927 inner_mode));
2928 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2929 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2930 : unsignedp)
2931 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2932 else
2933 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
2935 return op0;
2938 case MEM_REF:
2939 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2941 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2942 TREE_OPERAND (exp, 0),
2943 TREE_OPERAND (exp, 1));
2944 if (newexp)
2945 return expand_debug_expr (newexp);
2947 /* FALLTHROUGH */
2948 case INDIRECT_REF:
2949 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2950 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2951 if (!op0)
2952 return NULL;
2954 if (TREE_CODE (exp) == MEM_REF)
2956 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2957 || (GET_CODE (op0) == PLUS
2958 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2959 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2960 Instead just use get_inner_reference. */
2961 goto component_ref;
2963 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2964 if (!op1 || !CONST_INT_P (op1))
2965 return NULL;
2967 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
2970 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2971 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2972 else
2973 as = ADDR_SPACE_GENERIC;
2975 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2976 op0, as);
2977 if (op0 == NULL_RTX)
2978 return NULL;
2980 op0 = gen_rtx_MEM (mode, op0);
2981 set_mem_attributes (op0, exp, 0);
2982 if (TREE_CODE (exp) == MEM_REF
2983 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2984 set_mem_expr (op0, NULL_TREE);
2985 set_mem_addr_space (op0, as);
2987 return op0;
2989 case TARGET_MEM_REF:
2990 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2991 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2992 return NULL;
2994 op0 = expand_debug_expr
2995 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2996 if (!op0)
2997 return NULL;
2999 if (POINTER_TYPE_P (TREE_TYPE (exp)))
3000 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
3001 else
3002 as = ADDR_SPACE_GENERIC;
3004 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3005 op0, as);
3006 if (op0 == NULL_RTX)
3007 return NULL;
3009 op0 = gen_rtx_MEM (mode, op0);
3011 set_mem_attributes (op0, exp, 0);
3012 set_mem_addr_space (op0, as);
3014 return op0;
3016 component_ref:
3017 case ARRAY_REF:
3018 case ARRAY_RANGE_REF:
3019 case COMPONENT_REF:
3020 case BIT_FIELD_REF:
3021 case REALPART_EXPR:
3022 case IMAGPART_EXPR:
3023 case VIEW_CONVERT_EXPR:
3025 enum machine_mode mode1;
3026 HOST_WIDE_INT bitsize, bitpos;
3027 tree offset;
3028 int volatilep = 0;
3029 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3030 &mode1, &unsignedp, &volatilep, false);
3031 rtx orig_op0;
3033 if (bitsize == 0)
3034 return NULL;
3036 orig_op0 = op0 = expand_debug_expr (tem);
3038 if (!op0)
3039 return NULL;
3041 if (offset)
3043 enum machine_mode addrmode, offmode;
3045 if (!MEM_P (op0))
3046 return NULL;
3048 op0 = XEXP (op0, 0);
3049 addrmode = GET_MODE (op0);
3050 if (addrmode == VOIDmode)
3051 addrmode = Pmode;
3053 op1 = expand_debug_expr (offset);
3054 if (!op1)
3055 return NULL;
3057 offmode = GET_MODE (op1);
3058 if (offmode == VOIDmode)
3059 offmode = TYPE_MODE (TREE_TYPE (offset));
3061 if (addrmode != offmode)
3062 op1 = simplify_gen_subreg (addrmode, op1, offmode,
3063 subreg_lowpart_offset (addrmode,
3064 offmode));
3066 /* Don't use offset_address here; we don't need a
3067 recognizable address, and we don't want to generate
3068 code. */
3069 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
3070 op0, op1));
3073 if (MEM_P (op0))
3075 if (mode1 == VOIDmode)
3076 /* Bitfield. */
3077 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
3078 if (bitpos >= BITS_PER_UNIT)
3080 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
3081 bitpos %= BITS_PER_UNIT;
3083 else if (bitpos < 0)
3085 HOST_WIDE_INT units
3086 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
3087 op0 = adjust_address_nv (op0, mode1, units);
3088 bitpos += units * BITS_PER_UNIT;
3090 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
3091 op0 = adjust_address_nv (op0, mode, 0);
3092 else if (GET_MODE (op0) != mode1)
3093 op0 = adjust_address_nv (op0, mode1, 0);
3094 else
3095 op0 = copy_rtx (op0);
3096 if (op0 == orig_op0)
3097 op0 = shallow_copy_rtx (op0);
3098 set_mem_attributes (op0, exp, 0);
3101 if (bitpos == 0 && mode == GET_MODE (op0))
3102 return op0;
3104 if (bitpos < 0)
3105 return NULL;
3107 if (GET_MODE (op0) == BLKmode)
3108 return NULL;
3110 if ((bitpos % BITS_PER_UNIT) == 0
3111 && bitsize == GET_MODE_BITSIZE (mode1))
3113 enum machine_mode opmode = GET_MODE (op0);
3115 if (opmode == VOIDmode)
3116 opmode = TYPE_MODE (TREE_TYPE (tem));
3118 /* This condition may hold if we're expanding the address
3119 right past the end of an array that turned out not to
3120 be addressable (i.e., the address was only computed in
3121 debug stmts). The gen_subreg below would rightfully
3122 crash, and the address doesn't really exist, so just
3123 drop it. */
3124 if (bitpos >= GET_MODE_BITSIZE (opmode))
3125 return NULL;
3127 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3128 return simplify_gen_subreg (mode, op0, opmode,
3129 bitpos / BITS_PER_UNIT);
3132 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3133 && TYPE_UNSIGNED (TREE_TYPE (exp))
3134 ? SIGN_EXTRACT
3135 : ZERO_EXTRACT, mode,
3136 GET_MODE (op0) != VOIDmode
3137 ? GET_MODE (op0)
3138 : TYPE_MODE (TREE_TYPE (tem)),
3139 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3142 case ABS_EXPR:
3143 return simplify_gen_unary (ABS, mode, op0, mode);
3145 case NEGATE_EXPR:
3146 return simplify_gen_unary (NEG, mode, op0, mode);
3148 case BIT_NOT_EXPR:
3149 return simplify_gen_unary (NOT, mode, op0, mode);
3151 case FLOAT_EXPR:
3152 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3153 0)))
3154 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3155 inner_mode);
3157 case FIX_TRUNC_EXPR:
3158 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3159 inner_mode);
3161 case POINTER_PLUS_EXPR:
3162 /* For the rare target where pointers are not the same size as
3163 size_t, we need to check for mis-matched modes and correct
3164 the addend. */
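/* For instance, on a hypothetical target with 64-bit pointers but a
32-bit sizetype, the 32-bit addend is sign-extended to the pointer
mode below before the PLUS is formed.  */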
3165 if (op0 && op1
3166 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3167 && GET_MODE (op0) != GET_MODE (op1))
3169 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
3170 /* If OP0 is a partial mode, then we must truncate, even if it has
3171 the same bitsize as OP1, because GCC's representation of partial modes
3172 is opaque. */
3173 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
3174 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
3175 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3176 GET_MODE (op1));
3177 else
3178 /* We always sign-extend, regardless of the signedness of
3179 the operand, because the operand is always unsigned
3180 here even if the original C expression is signed. */
3181 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3182 GET_MODE (op1));
3184 /* Fall through. */
3185 case PLUS_EXPR:
3186 return simplify_gen_binary (PLUS, mode, op0, op1);
3188 case MINUS_EXPR:
3189 return simplify_gen_binary (MINUS, mode, op0, op1);
3191 case MULT_EXPR:
3192 return simplify_gen_binary (MULT, mode, op0, op1);
3194 case RDIV_EXPR:
3195 case TRUNC_DIV_EXPR:
3196 case EXACT_DIV_EXPR:
3197 if (unsignedp)
3198 return simplify_gen_binary (UDIV, mode, op0, op1);
3199 else
3200 return simplify_gen_binary (DIV, mode, op0, op1);
3202 case TRUNC_MOD_EXPR:
3203 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3205 case FLOOR_DIV_EXPR:
3206 if (unsignedp)
3207 return simplify_gen_binary (UDIV, mode, op0, op1);
3208 else
3210 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3211 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3212 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3213 return simplify_gen_binary (PLUS, mode, div, adj);
3216 case FLOOR_MOD_EXPR:
3217 if (unsignedp)
3218 return simplify_gen_binary (UMOD, mode, op0, op1);
3219 else
3221 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3222 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3223 adj = simplify_gen_unary (NEG, mode,
3224 simplify_gen_binary (MULT, mode, adj, op1),
3225 mode);
3226 return simplify_gen_binary (PLUS, mode, mod, adj);
3229 case CEIL_DIV_EXPR:
3230 if (unsignedp)
3232 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3233 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3234 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3235 return simplify_gen_binary (PLUS, mode, div, adj);
3237 else
3239 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3240 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3241 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3242 return simplify_gen_binary (PLUS, mode, div, adj);
3245 case CEIL_MOD_EXPR:
3246 if (unsignedp)
3248 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3249 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3250 adj = simplify_gen_unary (NEG, mode,
3251 simplify_gen_binary (MULT, mode, adj, op1),
3252 mode);
3253 return simplify_gen_binary (PLUS, mode, mod, adj);
3255 else
3257 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3258 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3259 adj = simplify_gen_unary (NEG, mode,
3260 simplify_gen_binary (MULT, mode, adj, op1),
3261 mode);
3262 return simplify_gen_binary (PLUS, mode, mod, adj);
3265 case ROUND_DIV_EXPR:
3266 if (unsignedp)
3268 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3269 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3270 rtx adj = round_udiv_adjust (mode, mod, op1);
3271 return simplify_gen_binary (PLUS, mode, div, adj);
3273 else
3275 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3276 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3277 rtx adj = round_sdiv_adjust (mode, mod, op1);
3278 return simplify_gen_binary (PLUS, mode, div, adj);
3281 case ROUND_MOD_EXPR:
3282 if (unsignedp)
3284 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3285 rtx adj = round_udiv_adjust (mode, mod, op1);
3286 adj = simplify_gen_unary (NEG, mode,
3287 simplify_gen_binary (MULT, mode, adj, op1),
3288 mode);
3289 return simplify_gen_binary (PLUS, mode, mod, adj);
3291 else
3293 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3294 rtx adj = round_sdiv_adjust (mode, mod, op1);
3295 adj = simplify_gen_unary (NEG, mode,
3296 simplify_gen_binary (MULT, mode, adj, op1),
3297 mode);
3298 return simplify_gen_binary (PLUS, mode, mod, adj);
3301 case LSHIFT_EXPR:
3302 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3304 case RSHIFT_EXPR:
3305 if (unsignedp)
3306 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3307 else
3308 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3310 case LROTATE_EXPR:
3311 return simplify_gen_binary (ROTATE, mode, op0, op1);
3313 case RROTATE_EXPR:
3314 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3316 case MIN_EXPR:
3317 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3319 case MAX_EXPR:
3320 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3322 case BIT_AND_EXPR:
3323 case TRUTH_AND_EXPR:
3324 return simplify_gen_binary (AND, mode, op0, op1);
3326 case BIT_IOR_EXPR:
3327 case TRUTH_OR_EXPR:
3328 return simplify_gen_binary (IOR, mode, op0, op1);
3330 case BIT_XOR_EXPR:
3331 case TRUTH_XOR_EXPR:
3332 return simplify_gen_binary (XOR, mode, op0, op1);
3334 case TRUTH_ANDIF_EXPR:
3335 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3337 case TRUTH_ORIF_EXPR:
3338 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3340 case TRUTH_NOT_EXPR:
3341 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3343 case LT_EXPR:
3344 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3345 op0, op1);
3347 case LE_EXPR:
3348 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3349 op0, op1);
3351 case GT_EXPR:
3352 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3353 op0, op1);
3355 case GE_EXPR:
3356 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3357 op0, op1);
3359 case EQ_EXPR:
3360 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3362 case NE_EXPR:
3363 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3365 case UNORDERED_EXPR:
3366 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3368 case ORDERED_EXPR:
3369 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3371 case UNLT_EXPR:
3372 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3374 case UNLE_EXPR:
3375 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3377 case UNGT_EXPR:
3378 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3380 case UNGE_EXPR:
3381 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3383 case UNEQ_EXPR:
3384 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3386 case LTGT_EXPR:
3387 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3389 case COND_EXPR:
3390 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3392 case COMPLEX_EXPR:
3393 gcc_assert (COMPLEX_MODE_P (mode));
3394 if (GET_MODE (op0) == VOIDmode)
3395 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3396 if (GET_MODE (op1) == VOIDmode)
3397 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3398 return gen_rtx_CONCAT (mode, op0, op1);
3400 case CONJ_EXPR:
3401 if (GET_CODE (op0) == CONCAT)
3402 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3403 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3404 XEXP (op0, 1),
3405 GET_MODE_INNER (mode)));
3406 else
3408 enum machine_mode imode = GET_MODE_INNER (mode);
3409 rtx re, im;
3411 if (MEM_P (op0))
3413 re = adjust_address_nv (op0, imode, 0);
3414 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3416 else
3418 enum machine_mode ifmode = int_mode_for_mode (mode);
3419 enum machine_mode ihmode = int_mode_for_mode (imode);
3420 rtx halfsize;
3421 if (ifmode == BLKmode || ihmode == BLKmode)
3422 return NULL;
3423 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3424 re = op0;
3425 if (mode != ifmode)
3426 re = gen_rtx_SUBREG (ifmode, re, 0);
3427 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3428 if (imode != ihmode)
3429 re = gen_rtx_SUBREG (imode, re, 0);
3430 im = copy_rtx (op0);
3431 if (mode != ifmode)
3432 im = gen_rtx_SUBREG (ifmode, im, 0);
3433 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3434 if (imode != ihmode)
3435 im = gen_rtx_SUBREG (imode, im, 0);
3437 im = gen_rtx_NEG (imode, im);
3438 return gen_rtx_CONCAT (mode, re, im);
3441 case ADDR_EXPR:
3442 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3443 if (!op0 || !MEM_P (op0))
3445 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3446 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3447 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3448 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3449 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3450 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3452 if (handled_component_p (TREE_OPERAND (exp, 0)))
3454 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3455 tree decl
3456 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3457 &bitoffset, &bitsize, &maxsize);
3458 if ((TREE_CODE (decl) == VAR_DECL
3459 || TREE_CODE (decl) == PARM_DECL
3460 || TREE_CODE (decl) == RESULT_DECL)
3461 && (!TREE_ADDRESSABLE (decl)
3462 || target_for_debug_bind (decl))
3463 && (bitoffset % BITS_PER_UNIT) == 0
3464 && bitsize > 0
3465 && bitsize == maxsize)
3467 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
3468 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
3472 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
3473 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
3474 == ADDR_EXPR)
3476 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
3477 0));
3478 if (op0 != NULL
3479 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3480 || (GET_CODE (op0) == PLUS
3481 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
3482 && CONST_INT_P (XEXP (op0, 1)))))
3484 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
3485 1));
3486 if (!op1 || !CONST_INT_P (op1))
3487 return NULL;
3489 return plus_constant (mode, op0, INTVAL (op1));
3493 return NULL;
3496 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3497 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3499 return op0;
3501 case VECTOR_CST:
3503 unsigned i;
3505 op0 = gen_rtx_CONCATN
3506 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3508 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
3510 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
3511 if (!op1)
3512 return NULL;
3513 XVECEXP (op0, 0, i) = op1;
3516 return op0;
3519 case CONSTRUCTOR:
3520 if (TREE_CLOBBER_P (exp))
3521 return NULL;
3522 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3524 unsigned i;
3525 tree val;
3527 op0 = gen_rtx_CONCATN
3528 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3530 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3532 op1 = expand_debug_expr (val);
3533 if (!op1)
3534 return NULL;
3535 XVECEXP (op0, 0, i) = op1;
3538 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3540 op1 = expand_debug_expr
3541 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3543 if (!op1)
3544 return NULL;
3546 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3547 XVECEXP (op0, 0, i) = op1;
3550 return op0;
3552 else
3553 goto flag_unsupported;
3555 case CALL_EXPR:
3556 /* ??? Maybe handle some builtins? */
3557 return NULL;
3559 case SSA_NAME:
3561 gimple g = get_gimple_for_ssa_name (exp);
3562 if (g)
3564 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3565 if (!op0)
3566 return NULL;
3568 else
3570 int part = var_to_partition (SA.map, exp);
3572 if (part == NO_PARTITION)
3574 /* If this is a reference to the incoming value of a parameter
3575 that is never used in the code, or whose incoming
3576 value is never used in the code, use the PARM_DECL's
3577 DECL_RTL if set. */
3578 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3579 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3581 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3582 if (op0)
3583 goto adjust_mode;
3584 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3585 if (op0)
3586 goto adjust_mode;
3588 return NULL;
3591 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3593 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3595 goto adjust_mode;
3598 case ERROR_MARK:
3599 return NULL;
3601 /* Vector stuff. For most of the codes we don't have rtl codes. */
3602 case REALIGN_LOAD_EXPR:
3603 case REDUC_MAX_EXPR:
3604 case REDUC_MIN_EXPR:
3605 case REDUC_PLUS_EXPR:
3606 case VEC_COND_EXPR:
3607 case VEC_LSHIFT_EXPR:
3608 case VEC_PACK_FIX_TRUNC_EXPR:
3609 case VEC_PACK_SAT_EXPR:
3610 case VEC_PACK_TRUNC_EXPR:
3611 case VEC_RSHIFT_EXPR:
3612 case VEC_UNPACK_FLOAT_HI_EXPR:
3613 case VEC_UNPACK_FLOAT_LO_EXPR:
3614 case VEC_UNPACK_HI_EXPR:
3615 case VEC_UNPACK_LO_EXPR:
3616 case VEC_WIDEN_MULT_HI_EXPR:
3617 case VEC_WIDEN_MULT_LO_EXPR:
3618 case VEC_WIDEN_MULT_EVEN_EXPR:
3619 case VEC_WIDEN_MULT_ODD_EXPR:
3620 case VEC_WIDEN_LSHIFT_HI_EXPR:
3621 case VEC_WIDEN_LSHIFT_LO_EXPR:
3622 case VEC_PERM_EXPR:
3623 return NULL;
3625 /* Misc codes. */
3626 case ADDR_SPACE_CONVERT_EXPR:
3627 case FIXED_CONVERT_EXPR:
3628 case OBJ_TYPE_REF:
3629 case WITH_SIZE_EXPR:
3630 return NULL;
3632 case DOT_PROD_EXPR:
3633 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3634 && SCALAR_INT_MODE_P (mode))
3637 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3638 0)))
3639 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3640 inner_mode);
3642 op1 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3643 1)))
3644 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3645 inner_mode);
3646 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3647 return simplify_gen_binary (PLUS, mode, op0, op2);
3649 return NULL;
3651 case WIDEN_MULT_EXPR:
3652 case WIDEN_MULT_PLUS_EXPR:
3653 case WIDEN_MULT_MINUS_EXPR:
3654 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3655 && SCALAR_INT_MODE_P (mode))
3657 inner_mode = GET_MODE (op0);
3658 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3659 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3660 else
3661 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3662 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3663 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3664 else
3665 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3666 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3667 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3668 return op0;
3669 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3670 return simplify_gen_binary (PLUS, mode, op0, op2);
3671 else
3672 return simplify_gen_binary (MINUS, mode, op2, op0);
3674 return NULL;
3676 case MULT_HIGHPART_EXPR:
3677 /* ??? Similar to the above. */
3678 return NULL;
3680 case WIDEN_SUM_EXPR:
3681 case WIDEN_LSHIFT_EXPR:
3682 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3683 && SCALAR_INT_MODE_P (mode))
3686 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3687 0)))
3688 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3689 inner_mode);
3690 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3691 ? ASHIFT : PLUS, mode, op0, op1);
3693 return NULL;
3695 case FMA_EXPR:
3696 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3698 default:
3699 flag_unsupported:
3700 #ifdef ENABLE_CHECKING
3701 debug_tree (exp);
3702 gcc_unreachable ();
3703 #else
3704 return NULL;
3705 #endif
3709 /* Return an RTX equivalent to the source bind value of the tree expression
3710 EXP. */
3712 static rtx
3713 expand_debug_source_expr (tree exp)
3715 rtx op0 = NULL_RTX;
3716 enum machine_mode mode = VOIDmode, inner_mode;
3718 switch (TREE_CODE (exp))
3720 case PARM_DECL:
3722 mode = DECL_MODE (exp);
3723 op0 = expand_debug_parm_decl (exp);
3724 if (op0)
3725 break;
3726 /* See if this is an argument that has been completely
3727 optimized out. */
3728 if (!DECL_RTL_SET_P (exp)
3729 && !DECL_INCOMING_RTL (exp)
3730 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3732 tree aexp = DECL_ORIGIN (exp);
3733 if (DECL_CONTEXT (aexp)
3734 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3736 vec<tree, va_gc> **debug_args;
3737 unsigned int ix;
3738 tree ddecl;
3739 debug_args = decl_debug_args_lookup (current_function_decl);
3740 if (debug_args != NULL)
3742 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
3743 ix += 2)
3744 if (ddecl == aexp)
3745 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3749 break;
3751 default:
3752 break;
3755 if (op0 == NULL_RTX)
3756 return NULL_RTX;
3758 inner_mode = GET_MODE (op0);
3759 if (mode == inner_mode)
3760 return op0;
3762 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3764 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3765 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3766 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3767 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3768 else
3769 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3771 else if (FLOAT_MODE_P (mode))
3772 gcc_unreachable ();
3773 else if (FLOAT_MODE_P (inner_mode))
3775 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3776 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3777 else
3778 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3780 else if (CONSTANT_P (op0)
3781 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3782 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3783 subreg_lowpart_offset (mode, inner_mode));
3784 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3785 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3786 else
3787 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3789 return op0;
3792 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
3793 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
3794 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
3796 static void
3797 avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
3799 rtx exp = *exp_p;
3801 if (exp == NULL_RTX)
3802 return;
3804 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
3805 return;
3807 if (depth == 4)
3809 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
3810 rtx dval = make_debug_expr_from_rtl (exp);
3812 /* Emit a debug bind insn before INSN. */
3813 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
3814 DEBUG_EXPR_TREE_DECL (dval), exp,
3815 VAR_INIT_STATUS_INITIALIZED);
3817 emit_debug_insn_before (bind, insn);
3818 *exp_p = dval;
3819 return;
3822 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
3823 int i, j;
3824 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
3825 switch (*format_ptr++)
3827 case 'e':
3828 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
3829 break;
3831 case 'E':
3832 case 'V':
3833 for (j = 0; j < XVECLEN (exp, i); j++)
3834 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
3835 break;
3837 default:
3838 break;
3842 /* Expand the _LOCs in debug insns. We run this after expanding all
3843 regular insns, so that any variables referenced in the function
3844 will have their DECL_RTLs set. */
3846 static void
3847 expand_debug_locations (void)
3849 rtx insn;
3850 rtx last = get_last_insn ();
3851 int save_strict_alias = flag_strict_aliasing;
3853 /* New alias sets while setting up memory attributes cause
3854 -fcompare-debug failures, even though they don't bring about any
3855 codegen changes. */
3856 flag_strict_aliasing = 0;
3858 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3859 if (DEBUG_INSN_P (insn))
3861 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3862 rtx val, prev_insn, insn2;
3863 enum machine_mode mode;
3865 if (value == NULL_TREE)
3866 val = NULL_RTX;
3867 else
3869 if (INSN_VAR_LOCATION_STATUS (insn)
3870 == VAR_INIT_STATUS_UNINITIALIZED)
3871 val = expand_debug_source_expr (value);
3872 else
3873 val = expand_debug_expr (value);
3874 gcc_assert (last == get_last_insn ());
3877 if (!val)
3878 val = gen_rtx_UNKNOWN_VAR_LOC ();
3879 else
3881 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3883 gcc_assert (mode == GET_MODE (val)
3884 || (GET_MODE (val) == VOIDmode
3885 && (CONST_SCALAR_INT_P (val)
3886 || GET_CODE (val) == CONST_FIXED
3887 || GET_CODE (val) == LABEL_REF)));
3890 INSN_VAR_LOCATION_LOC (insn) = val;
3891 prev_insn = PREV_INSN (insn);
3892 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
3893 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
3896 flag_strict_aliasing = save_strict_alias;
3899 /* Expand basic block BB from GIMPLE trees to RTL. */
3901 static basic_block
3902 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
3904 gimple_stmt_iterator gsi;
3905 gimple_seq stmts;
3906 gimple stmt = NULL;
3907 rtx note, last;
3908 edge e;
3909 edge_iterator ei;
3910 void **elt;
3912 if (dump_file)
3913 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3914 bb->index);
3916 /* Note that since we are now transitioning from GIMPLE to RTL, we
3917 cannot use the gsi_*_bb() routines because they expect the basic
3918 block to be in GIMPLE, instead of RTL. Therefore, we need to
3919 access the BB sequence directly. */
3920 stmts = bb_seq (bb);
3921 bb->il.gimple.seq = NULL;
3922 bb->il.gimple.phi_nodes = NULL;
3923 rtl_profile_for_bb (bb);
3924 init_rtl_bb_info (bb);
3925 bb->flags |= BB_RTL;
3927 /* Remove the RETURN_EXPR if we may fall through to the exit
3928 instead. */
3929 gsi = gsi_last (stmts);
3930 if (!gsi_end_p (gsi)
3931 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3933 gimple ret_stmt = gsi_stmt (gsi);
3935 gcc_assert (single_succ_p (bb));
3936 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3938 if (bb->next_bb == EXIT_BLOCK_PTR
3939 && !gimple_return_retval (ret_stmt))
3941 gsi_remove (&gsi, false);
3942 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3946 gsi = gsi_start (stmts);
3947 if (!gsi_end_p (gsi))
3949 stmt = gsi_stmt (gsi);
3950 if (gimple_code (stmt) != GIMPLE_LABEL)
3951 stmt = NULL;
3954 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3956 if (stmt || elt)
3958 last = get_last_insn ();
3960 if (stmt)
3962 expand_gimple_stmt (stmt);
3963 gsi_next (&gsi);
3966 if (elt)
3967 emit_label ((rtx) *elt);
3969 /* Java emits line number notes at the top of labels.
3970 ??? Make this go away once line number notes are obsoleted. */
3971 BB_HEAD (bb) = NEXT_INSN (last);
3972 if (NOTE_P (BB_HEAD (bb)))
3973 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3974 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3976 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3978 else
3979 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3981 NOTE_BASIC_BLOCK (note) = bb;
3983 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3985 basic_block new_bb;
3987 stmt = gsi_stmt (gsi);
3989 /* If this statement is a non-debug one, and we generate debug
3990 insns, then this one might be the last real use of a TERed
3991 SSA_NAME, but where there are still some debug uses further
3992 down. Expanding the current SSA name in such further debug
3993 uses by its RHS might lead to wrong debug info, as coalescing
3994 might make the operands of such RHS be placed into the same
3995 pseudo as something else. Like so:
3996 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3997 use(a_1);
3998 a_2 = ...
3999 #DEBUG ... => a_1
4000 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
4001 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
4002 the write to a_2 would actually have clobbered the place which
4003 formerly held a_0.
4005 So, instead of that, we recognize the situation, and generate
4006 debug temporaries at the last real use of TERed SSA names:
4007 a_1 = a_0 + 1;
4008 #DEBUG #D1 => a_1
4009 use(a_1);
4010 a_2 = ...
4011 #DEBUG ... => #D1
4013 if (MAY_HAVE_DEBUG_INSNS
4014 && SA.values
4015 && !is_gimple_debug (stmt))
4017 ssa_op_iter iter;
4018 tree op;
4019 gimple def;
4021 location_t sloc = curr_insn_location ();
4023 /* Look for SSA names that have their last use here (TERed
4024 names always have only one real use). */
4025 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4026 if ((def = get_gimple_for_ssa_name (op)))
4028 imm_use_iterator imm_iter;
4029 use_operand_p use_p;
4030 bool have_debug_uses = false;
4032 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
4034 if (gimple_debug_bind_p (USE_STMT (use_p)))
4036 have_debug_uses = true;
4037 break;
4041 if (have_debug_uses)
4043 /* OP is a TERed SSA name, with DEF its defining
4044 statement, and where OP is used in further debug
4045 instructions. Generate a debug temporary, and
4046 replace all uses of OP in debug insns with that
4047 temporary. */
4048 gimple debugstmt;
4049 tree value = gimple_assign_rhs_to_tree (def);
4050 tree vexpr = make_node (DEBUG_EXPR_DECL);
4051 rtx val;
4052 enum machine_mode mode;
4054 set_curr_insn_location (gimple_location (def));
4056 DECL_ARTIFICIAL (vexpr) = 1;
4057 TREE_TYPE (vexpr) = TREE_TYPE (value);
4058 if (DECL_P (value))
4059 mode = DECL_MODE (value);
4060 else
4061 mode = TYPE_MODE (TREE_TYPE (value));
4062 DECL_MODE (vexpr) = mode;
4064 val = gen_rtx_VAR_LOCATION
4065 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4067 emit_debug_insn (val);
4069 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
4071 if (!gimple_debug_bind_p (debugstmt))
4072 continue;
4074 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
4075 SET_USE (use_p, vexpr);
4077 update_stmt (debugstmt);
4081 set_curr_insn_location (sloc);
4084 currently_expanding_gimple_stmt = stmt;
4086 /* Expand this statement, then evaluate the resulting RTL and
4087 fixup the CFG accordingly. */
4088 if (gimple_code (stmt) == GIMPLE_COND)
4090 new_bb = expand_gimple_cond (bb, stmt);
4091 if (new_bb)
4092 return new_bb;
4094 else if (gimple_debug_bind_p (stmt))
4096 location_t sloc = curr_insn_location ();
4097 gimple_stmt_iterator nsi = gsi;
4099 for (;;)
4101 tree var = gimple_debug_bind_get_var (stmt);
4102 tree value;
4103 rtx val;
4104 enum machine_mode mode;
4106 if (TREE_CODE (var) != DEBUG_EXPR_DECL
4107 && TREE_CODE (var) != LABEL_DECL
4108 && !target_for_debug_bind (var))
4109 goto delink_debug_stmt;
4111 if (gimple_debug_bind_has_value_p (stmt))
4112 value = gimple_debug_bind_get_value (stmt);
4113 else
4114 value = NULL_TREE;
4116 last = get_last_insn ();
4118 set_curr_insn_location (gimple_location (stmt));
4120 if (DECL_P (var))
4121 mode = DECL_MODE (var);
4122 else
4123 mode = TYPE_MODE (TREE_TYPE (var));
4125 val = gen_rtx_VAR_LOCATION
4126 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4128 emit_debug_insn (val);
4130 if (dump_file && (dump_flags & TDF_DETAILS))
4132 /* We can't dump the insn with a TREE where an RTX
4133 is expected. */
4134 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4135 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4136 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4139 delink_debug_stmt:
4140 /* In order not to generate too many debug temporaries,
4141 we delink all uses of debug statements we already expanded.
4142 Therefore debug statements between definition and real
4143 use of TERed SSA names will continue to use the SSA name,
4144 and not be replaced with debug temps. */
4145 delink_stmt_imm_use (stmt);
4147 gsi = nsi;
4148 gsi_next (&nsi);
4149 if (gsi_end_p (nsi))
4150 break;
4151 stmt = gsi_stmt (nsi);
4152 if (!gimple_debug_bind_p (stmt))
4153 break;
4156 set_curr_insn_location (sloc);
4158 else if (gimple_debug_source_bind_p (stmt))
4160 location_t sloc = curr_insn_location ();
4161 tree var = gimple_debug_source_bind_get_var (stmt);
4162 tree value = gimple_debug_source_bind_get_value (stmt);
4163 rtx val;
4164 enum machine_mode mode;
4166 last = get_last_insn ();
4168 set_curr_insn_location (gimple_location (stmt));
4170 mode = DECL_MODE (var);
4172 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
4173 VAR_INIT_STATUS_UNINITIALIZED);
4175 emit_debug_insn (val);
4177 if (dump_file && (dump_flags & TDF_DETAILS))
4179 /* We can't dump the insn with a TREE where an RTX
4180 is expected. */
4181 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4182 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4183 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4186 set_curr_insn_location (sloc);
4188 else
4190 if (is_gimple_call (stmt)
4191 && gimple_call_tail_p (stmt)
4192 && disable_tail_calls)
4193 gimple_call_set_tail (stmt, false);
4195 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
4197 bool can_fallthru;
4198 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4199 if (new_bb)
4201 if (can_fallthru)
4202 bb = new_bb;
4203 else
4204 return new_bb;
4207 else
4209 def_operand_p def_p;
4210 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4212 if (def_p != NULL)
4214 /* Ignore this stmt if it is in the list of
4215 replaceable expressions. */
4216 if (SA.values
4217 && bitmap_bit_p (SA.values,
4218 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4219 continue;
4221 last = expand_gimple_stmt (stmt);
4222 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4227 currently_expanding_gimple_stmt = NULL;
4229 /* Expand implicit goto and convert goto_locus. */
4230 FOR_EACH_EDGE (e, ei, bb->succs)
4232 if (e->goto_locus != UNKNOWN_LOCATION)
4233 set_curr_insn_location (e->goto_locus);
4234 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4236 emit_jump (label_rtx_for_bb (e->dest));
4237 e->flags &= ~EDGE_FALLTHRU;
4241 /* Expanded RTL can create a jump as the last instruction of a block.
4242 Later passes might assume it is a jump to the successor and break edge insertion.
4243 We need to insert a dummy move to prevent this. PR41440. */
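/* A sketch of the scenario being avoided (hypothetical RTL, for illustration
   only): if expansion leaves the block as
     ...
     (jump_insn ...)        <- would become BB_END (bb)
   with a single EDGE_FALLTHRU successor, insns committed on that edge later
   could be appended after what looks like a jump to the successor.  The no-op
   move
     (set (reg:SI dummy) (reg:SI dummy))
   emitted after the jump gives the block a non-jump final insn instead.  */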
4244 if (single_succ_p (bb)
4245 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4246 && (last = get_last_insn ())
4247 && JUMP_P (last))
4249 rtx dummy = gen_reg_rtx (SImode);
4250 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4253 do_pending_stack_adjust ();
4255 /* Find the block tail. The last insn in the block is the insn
4256 before a barrier and/or table jump insn. */
4257 last = get_last_insn ();
4258 if (BARRIER_P (last))
4259 last = PREV_INSN (last);
4260 if (JUMP_TABLE_DATA_P (last))
4261 last = PREV_INSN (PREV_INSN (last));
4262 BB_END (bb) = last;
4264 update_bb_for_insn (bb);
4266 return bb;
4270 /* Create a basic block for initialization code. */
4272 static basic_block
4273 construct_init_block (void)
4275 basic_block init_block, first_block;
4276 edge e = NULL;
4277 int flags;
4279 /* Multiple entry points not supported yet. */
4280 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4281 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4282 init_rtl_bb_info (EXIT_BLOCK_PTR);
4283 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4284 EXIT_BLOCK_PTR->flags |= BB_RTL;
4286 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4288 /* When the entry edge points to the first basic block, we don't need a jump;
4289 otherwise we have to jump to the proper target. */
4290 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4292 tree label = gimple_block_label (e->dest);
4294 emit_jump (label_rtx (label));
4295 flags = 0;
4297 else
4298 flags = EDGE_FALLTHRU;
4300 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4301 get_last_insn (),
4302 ENTRY_BLOCK_PTR);
4303 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4304 init_block->count = ENTRY_BLOCK_PTR->count;
4305 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
4306 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
4307 if (e)
4309 first_block = e->dest;
4310 redirect_edge_succ (e, init_block);
4311 e = make_edge (init_block, first_block, flags);
4313 else
4314 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4315 e->probability = REG_BR_PROB_BASE;
4316 e->count = ENTRY_BLOCK_PTR->count;
4318 update_bb_for_insn (init_block);
4319 return init_block;
4322 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4323 found in the block tree. */
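/* An illustrative sketch (hypothetical source, for illustration only):
     void f (void)
     {                    <- outermost BLOCK (DECL_INITIAL), level 0
       int a;
       { int b; }         <- BLOCK_SUBBLOCKS of the outer block, level 1
       { int c;           <- its sibling, reached via BLOCK_CHAIN, level 1
         { int d; }       <- nested again, level 2
       }
     }
   set_block_levels records 0, 1, 1 and 2 in the respective BLOCK_NUMBERs, so
   change_scope can later find a common parent by walking up the levels.  */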
4325 static void
4326 set_block_levels (tree block, int level)
4328 while (block)
4330 BLOCK_NUMBER (block) = level;
4331 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4332 block = BLOCK_CHAIN (block);
4336 /* Create a block containing landing pads and similar stuff. */
4338 static void
4339 construct_exit_block (void)
4341 rtx head = get_last_insn ();
4342 rtx end;
4343 basic_block exit_block;
4344 edge e, e2;
4345 unsigned ix;
4346 edge_iterator ei;
4347 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4349 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4351 /* Make sure the locus is set to the end of the function, so that
4352 epilogue line numbers and warnings are set properly. */
4353 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
4354 input_location = cfun->function_end_locus;
4356 /* Generate rtl for function exit. */
4357 expand_function_end ();
4359 end = get_last_insn ();
4360 if (head == end)
4361 return;
4362 /* While emitting the function end we could have moved the end of the last basic block. */
4364 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4365 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4366 head = NEXT_INSN (head);
4367 exit_block = create_basic_block (NEXT_INSN (head), end,
4368 EXIT_BLOCK_PTR->prev_bb);
4369 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4370 exit_block->count = EXIT_BLOCK_PTR->count;
4371 if (current_loops && EXIT_BLOCK_PTR->loop_father)
4372 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
4374 ix = 0;
4375 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4377 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4378 if (!(e->flags & EDGE_ABNORMAL))
4379 redirect_edge_succ (e, exit_block);
4380 else
4381 ix++;
4384 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4385 e->probability = REG_BR_PROB_BASE;
4386 e->count = EXIT_BLOCK_PTR->count;
4387 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4388 if (e2 != e)
4390 e->count -= e2->count;
4391 exit_block->count -= e2->count;
4392 exit_block->frequency -= EDGE_FREQUENCY (e2);
4394 if (e->count < 0)
4395 e->count = 0;
4396 if (exit_block->count < 0)
4397 exit_block->count = 0;
4398 if (exit_block->frequency < 0)
4399 exit_block->frequency = 0;
4400 update_bb_for_insn (exit_block);
4403 /* Helper function for discover_nonconstant_array_refs.
4404 Look for ARRAY_REF nodes with non-constant indexes and mark the
4405 referenced arrays addressable. */
4407 static tree
4408 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4409 void *data ATTRIBUTE_UNUSED)
4411 tree t = *tp;
4413 if (IS_TYPE_OR_DECL_P (t))
4414 *walk_subtrees = 0;
4415 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4417 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4418 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4419 && (!TREE_OPERAND (t, 2)
4420 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4421 || (TREE_CODE (t) == COMPONENT_REF
4422 && (!TREE_OPERAND (t,2)
4423 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4424 || TREE_CODE (t) == BIT_FIELD_REF
4425 || TREE_CODE (t) == REALPART_EXPR
4426 || TREE_CODE (t) == IMAGPART_EXPR
4427 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4428 || CONVERT_EXPR_P (t))
4429 t = TREE_OPERAND (t, 0);
4431 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4433 t = get_base_address (t);
4434 if (t && DECL_P (t)
4435 && DECL_MODE (t) != BLKmode)
4436 TREE_ADDRESSABLE (t) = 1;
4439 *walk_subtrees = 0;
4442 return NULL_TREE;
4445 /* RTL expansion is not able to compile array references with variable
4446 offsets for arrays stored in a single register. Discover such
4447 expressions and mark the variables as addressable to avoid this
4448 scenario. */
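/* An illustrative sketch (hypothetical source, for illustration only):
     int
     pick (int i)
     {
       char buf[4] = { 1, 2, 3, 4 };   <- small array; may get a non-BLKmode
       return buf[i];                  <- variable index
     }
   Because buf could otherwise be promoted into a single register, the
   variable-offset ARRAY_REF would have no RTL form; marking buf
   TREE_ADDRESSABLE forces it into memory, where the access can be expanded
   as an ordinary load.  */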
4450 static void
4451 discover_nonconstant_array_refs (void)
4453 basic_block bb;
4454 gimple_stmt_iterator gsi;
4456 FOR_EACH_BB (bb)
4457 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4459 gimple stmt = gsi_stmt (gsi);
4460 if (!is_gimple_debug (stmt))
4461 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
4465 /* This function sets crtl->args.internal_arg_pointer to a virtual
4466 register if DRAP is needed. The local register allocator will replace
4467 virtual_incoming_args_rtx with the virtual register. */
4469 static void
4470 expand_stack_alignment (void)
4472 rtx drap_rtx;
4473 unsigned int preferred_stack_boundary;
4475 if (! SUPPORTS_STACK_ALIGNMENT)
4476 return;
4478 if (cfun->calls_alloca
4479 || cfun->has_nonlocal_label
4480 || crtl->has_nonlocal_goto)
4481 crtl->need_drap = true;
4483 /* Call update_stack_boundary here again to update incoming stack
4484 boundary. It may set incoming stack alignment to a different
4485 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4486 use the minimum incoming stack alignment to check if it is OK
4487 to perform sibcall optimization since sibcall optimization will
4488 only align the outgoing stack to incoming stack boundary. */
4489 if (targetm.calls.update_stack_boundary)
4490 targetm.calls.update_stack_boundary ();
4492 /* The incoming stack frame has to be aligned at least at
4493 parm_stack_boundary. */
4494 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4496 /* Update crtl->stack_alignment_estimated and use it later to align
4497 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4498 exceptions since callgraph doesn't collect incoming stack alignment
4499 in this case. */
4500 if (cfun->can_throw_non_call_exceptions
4501 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4502 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4503 else
4504 preferred_stack_boundary = crtl->preferred_stack_boundary;
4505 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4506 crtl->stack_alignment_estimated = preferred_stack_boundary;
4507 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4508 crtl->stack_alignment_needed = preferred_stack_boundary;
4510 gcc_assert (crtl->stack_alignment_needed
4511 <= crtl->stack_alignment_estimated);
4513 crtl->stack_realign_needed
4514 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4515 crtl->stack_realign_tried = crtl->stack_realign_needed;
4517 crtl->stack_realign_processed = true;
4519 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4520 alignment. */
4521 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4522 drap_rtx = targetm.calls.get_drap_rtx ();
4524 /* stack_realign_drap and drap_rtx must match. */
4525 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4527 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4528 if (NULL != drap_rtx)
4530 crtl->args.internal_arg_pointer = drap_rtx;
4532 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4533 needed. */
4534 fixup_tail_calls ();
4538 /* Translate the intermediate representation contained in the CFG
4539 from GIMPLE trees to RTL.
4541 We do conversion per basic block and preserve/update the tree CFG.
4542 This implies we have to do some magic as the CFG can simultaneously
4543 consist of basic blocks containing RTL and GIMPLE trees. This can
4544 confuse the CFG hooks, so be careful not to manipulate the CFG during
4545 the expansion. */
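/* A rough sketch of the per-statement effect (hand-written, not literal dump
   output): a GIMPLE assignment such as
     x_3 = a_1 + b_2;
   is expanded into an RTL insn along the lines of
     (insn (set (reg:SI 93 [ x ])
                (plus:SI (reg:SI 91 [ a ]) (reg:SI 92 [ b ]))))
   in the same basic block, whose il switches to RTL (BB_RTL) as it is
   expanded, while not-yet-expanded blocks still hold GIMPLE.  */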
4547 static unsigned int
4548 gimple_expand_cfg (void)
4550 basic_block bb, init_block;
4551 sbitmap blocks;
4552 edge_iterator ei;
4553 edge e;
4554 rtx var_seq, var_ret_seq;
4555 unsigned i;
4557 timevar_push (TV_OUT_OF_SSA);
4558 rewrite_out_of_ssa (&SA);
4559 timevar_pop (TV_OUT_OF_SSA);
4560 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
4562 /* Make sure all values used by the optimization passes have sane
4563 defaults. */
4564 reg_renumber = 0;
4566 /* Some backends want to know that we are expanding to RTL. */
4567 currently_expanding_to_rtl = 1;
4568 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
4569 free_dominance_info (CDI_DOMINATORS);
4571 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4573 insn_locations_init ();
4574 if (!DECL_IS_BUILTIN (current_function_decl))
4576 /* Eventually, all FEs should explicitly set function_start_locus. */
4577 if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
4578 set_curr_insn_location
4579 (DECL_SOURCE_LOCATION (current_function_decl));
4580 else
4581 set_curr_insn_location (cfun->function_start_locus);
4583 else
4584 set_curr_insn_location (UNKNOWN_LOCATION);
4585 prologue_location = curr_insn_location ();
4587 #ifdef INSN_SCHEDULING
4588 init_sched_attrs ();
4589 #endif
4591 /* Make sure first insn is a note even if we don't want linenums.
4592 This makes sure the first insn will never be deleted.
4593 Also, final expects a note to appear there. */
4594 emit_note (NOTE_INSN_DELETED);
4596 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4597 discover_nonconstant_array_refs ();
4599 targetm.expand_to_rtl_hook ();
4600 crtl->stack_alignment_needed = STACK_BOUNDARY;
4601 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
4602 crtl->stack_alignment_estimated = 0;
4603 crtl->preferred_stack_boundary = STACK_BOUNDARY;
4604 cfun->cfg->max_jumptable_ents = 0;
4606 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
4607 of the function section at expansion time to predict the distance of calls. */
4608 resolve_unique_section (current_function_decl, 0, flag_function_sections);
4610 /* Expand the variables recorded during gimple lowering. */
4611 timevar_push (TV_VAR_EXPAND);
4612 start_sequence ();
4614 var_ret_seq = expand_used_vars ();
4616 var_seq = get_insns ();
4617 end_sequence ();
4618 timevar_pop (TV_VAR_EXPAND);
4620 /* Honor stack protection warnings. */
4621 if (warn_stack_protect)
4623 if (cfun->calls_alloca)
4624 warning (OPT_Wstack_protector,
4625 "stack protector not protecting local variables: "
4626 "variable length buffer");
4627 if (has_short_buffer && !crtl->stack_protect_guard)
4628 warning (OPT_Wstack_protector,
4629 "stack protector not protecting function: "
4630 "all local arrays are less than %d bytes long",
4631 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4634 /* Set up parameters and prepare for return, for the function. */
4635 expand_function_start (current_function_decl);
4637 /* If we emitted any instructions for setting up the variables,
4638 emit them before the FUNCTION_START note. */
4639 if (var_seq)
4641 emit_insn_before (var_seq, parm_birth_insn);
4643 /* In expand_function_end we'll insert the alloca save/restore
4644 before parm_birth_insn. We've just inserted an alloca call.
4645 Adjust the pointer to match. */
4646 parm_birth_insn = var_seq;
4649 /* Now that we also have the parameter RTXs, copy them over to our
4650 partitions. */
4651 for (i = 0; i < SA.map->num_partitions; i++)
4653 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4655 if (TREE_CODE (var) != VAR_DECL
4656 && !SA.partition_to_pseudo[i])
4657 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4658 gcc_assert (SA.partition_to_pseudo[i]);
4660 /* If this decl was marked as living in multiple places, reset
4661 this now to NULL. */
4662 if (DECL_RTL_IF_SET (var) == pc_rtx)
4663 SET_DECL_RTL (var, NULL);
4665 /* Some RTL parts really want to look at DECL_RTL(x) when x
4666 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4667 SET_DECL_RTL here to make this available, but that would mean
4668 selecting one of the potentially many RTLs for one DECL. Instead
4669 of doing that we simply reset the MEM_EXPR of the RTL in question,
4670 so nobody can get at it and hence nobody can call DECL_RTL on it. */
4671 if (!DECL_RTL_SET_P (var))
4673 if (MEM_P (SA.partition_to_pseudo[i]))
4674 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4678 /* If we have a class containing differently aligned pointers,
4679 we need to merge those into the corresponding RTL pointer
4680 alignment. */
4681 for (i = 1; i < num_ssa_names; i++)
4683 tree name = ssa_name (i);
4684 int part;
4685 rtx r;
4687 if (!name
4688 /* We might have generated new SSA names in
4689 update_alias_info_with_stack_vars. They will have a NULL
4690 defining statement, and won't be part of the partitioning,
4691 so ignore those. */
4692 || !SSA_NAME_DEF_STMT (name))
4693 continue;
4694 part = var_to_partition (SA.map, name);
4695 if (part == NO_PARTITION)
4696 continue;
4698 /* Adjust all partition members to get the underlying decl of
4699 the representative which we might have created in expand_one_var. */
4700 if (SSA_NAME_VAR (name) == NULL_TREE)
4702 tree leader = partition_to_var (SA.map, part);
4703 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
4704 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
4706 if (!POINTER_TYPE_P (TREE_TYPE (name)))
4707 continue;
4709 r = SA.partition_to_pseudo[part];
4710 if (REG_P (r))
4711 mark_reg_pointer (r, get_pointer_alignment (name));
4714 /* If this function is `main', emit a call to `__main'
4715 to run global initializers, etc. */
4716 if (DECL_NAME (current_function_decl)
4717 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4718 && DECL_FILE_SCOPE_P (current_function_decl))
4719 expand_main_function ();
4721 /* Initialize the stack_protect_guard field. This must happen after the
4722 call to __main (if any) so that the external decl is initialized. */
4723 if (crtl->stack_protect_guard)
4724 stack_protect_prologue ();
4726 expand_phi_nodes (&SA);
4728 /* Register rtl specific functions for cfg. */
4729 rtl_register_cfg_hooks ();
4731 init_block = construct_init_block ();
4733 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
4734 remaining edges later. */
4735 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4736 e->flags &= ~EDGE_EXECUTABLE;
4738 lab_rtx_for_bb = pointer_map_create ();
4739 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4740 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
4742 if (MAY_HAVE_DEBUG_INSNS)
4743 expand_debug_locations ();
4745 /* Free stuff we no longer need after GIMPLE optimizations. */
4746 free_dominance_info (CDI_DOMINATORS);
4747 free_dominance_info (CDI_POST_DOMINATORS);
4748 delete_tree_cfg_annotations ();
4750 timevar_push (TV_OUT_OF_SSA);
4751 finish_out_of_ssa (&SA);
4752 timevar_pop (TV_OUT_OF_SSA);
4754 timevar_push (TV_POST_EXPAND);
4755 /* We are no longer in SSA form. */
4756 cfun->gimple_df->in_ssa_p = false;
4757 if (current_loops)
4758 loops_state_clear (LOOP_CLOSED_SSA);
4760 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4761 conservatively to true until they are all profile aware. */
4762 pointer_map_destroy (lab_rtx_for_bb);
4763 free_histograms ();
4765 construct_exit_block ();
4766 insn_locations_finalize ();
4768 if (var_ret_seq)
4770 rtx after = return_label;
4771 rtx next = NEXT_INSN (after);
4772 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
4773 after = next;
4774 emit_insn_after (var_ret_seq, after);
4777 /* Zap the tree EH table. */
4778 set_eh_throw_stmt_table (cfun, NULL);
4780 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
4781 to split edges, which edge insertion might do. */
4782 rebuild_jump_labels (get_insns ());
4784 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4786 edge e;
4787 edge_iterator ei;
4788 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4790 if (e->insns.r)
4792 rebuild_jump_labels_chain (e->insns.r);
4793 /* Put insns after parm birth, but before
4794 NOTE_INSN_FUNCTION_BEG. */
4795 if (e->src == ENTRY_BLOCK_PTR
4796 && single_succ_p (ENTRY_BLOCK_PTR))
4798 rtx insns = e->insns.r;
4799 e->insns.r = NULL_RTX;
4800 if (NOTE_P (parm_birth_insn)
4801 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
4802 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
4803 else
4804 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4806 else
4807 commit_one_edge_insertion (e);
4809 else
4810 ei_next (&ei);
4814 /* We're done expanding trees to RTL. */
4815 currently_expanding_to_rtl = 0;
4817 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4819 edge e;
4820 edge_iterator ei;
4821 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4823 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4824 e->flags &= ~EDGE_EXECUTABLE;
4826 /* At the moment not all abnormal edges match the RTL
4827 representation. It is safe to remove them here as
4828 find_many_sub_basic_blocks will rediscover them.
4829 In the future we should get this fixed properly. */
4830 if ((e->flags & EDGE_ABNORMAL)
4831 && !(e->flags & EDGE_SIBCALL))
4832 remove_edge (e);
4833 else
4834 ei_next (&ei);
4838 blocks = sbitmap_alloc (last_basic_block);
4839 bitmap_ones (blocks);
4840 find_many_sub_basic_blocks (blocks);
4841 sbitmap_free (blocks);
4842 purge_all_dead_edges ();
4844 expand_stack_alignment ();
4846 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
4847 function. */
4848 if (crtl->tail_call_emit)
4849 fixup_tail_calls ();
4851 /* After initial rtl generation, call back to finish generating
4852 exception support code. We need to do this before cleaning up
4853 the CFG as the code does not expect dead landing pads. */
4854 if (cfun->eh->region_tree != NULL)
4855 finish_eh_generation ();
4857 /* Remove unreachable blocks, otherwise we cannot compute dominators
4858 which are needed for loop state verification. As a side-effect
4859 this also compacts blocks.
4860 ??? We cannot remove trivially dead insns here as for example
4861 the DRAP reg on i?86 is not magically live at this point.
4862 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
4863 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4865 #ifdef ENABLE_CHECKING
4866 verify_flow_info ();
4867 #endif
4869 /* Initialize pseudos allocated for hard registers. */
4870 emit_initial_value_sets ();
4872 /* And finally unshare all RTL. */
4873 unshare_all_rtl ();
4875 /* There's no need to defer outputting this function any more; we
4876 know we want to output it. */
4877 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4879 /* Now that we're done expanding trees to RTL, we shouldn't have any
4880 more CONCATs anywhere. */
4881 generating_concat_p = 0;
4883 if (dump_file)
4885 fprintf (dump_file,
4886 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4887 /* And the pass manager will dump RTL for us. */
4890 /* If we're emitting a nested function, make sure its parent gets
4891 emitted as well. Doing otherwise confuses debug info. */
4893 tree parent;
4894 for (parent = DECL_CONTEXT (current_function_decl);
4895 parent != NULL_TREE;
4896 parent = get_containing_scope (parent))
4897 if (TREE_CODE (parent) == FUNCTION_DECL)
4898 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4901 /* We are now committed to emitting code for this function. Do any
4902 preparation, such as emitting abstract debug info for the inline function
4903 before it gets mangled by optimization. */
4904 if (cgraph_function_possibly_inlined_p (current_function_decl))
4905 (*debug_hooks->outlining_inline_function) (current_function_decl);
4907 TREE_ASM_WRITTEN (current_function_decl) = 1;
4909 /* After expanding, the return labels are no longer needed. */
4910 return_label = NULL;
4911 naked_return_label = NULL;
4913 /* After expanding, the tm_restart map is no longer needed. */
4914 if (cfun->gimple_df->tm_restart)
4916 htab_delete (cfun->gimple_df->tm_restart);
4917 cfun->gimple_df->tm_restart = NULL;
4920 /* Tag the blocks with a depth number so that change_scope can find
4921 the common parent easily. */
4922 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4923 default_rtl_profile ();
4925 timevar_pop (TV_POST_EXPAND);
4927 return 0;
4930 namespace {
4932 const pass_data pass_data_expand =
4934 RTL_PASS, /* type */
4935 "expand", /* name */
4936 OPTGROUP_NONE, /* optinfo_flags */
4937 false, /* has_gate */
4938 true, /* has_execute */
4939 TV_EXPAND, /* tv_id */
4940 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
4941 | PROP_gimple_lcx
4942 | PROP_gimple_lvec ), /* properties_required */
4943 PROP_rtl, /* properties_provided */
4944 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
4945 ( TODO_verify_ssa | TODO_verify_flow
4946 | TODO_verify_stmts ), /* todo_flags_start */
4947 0, /* todo_flags_finish */
4950 class pass_expand : public rtl_opt_pass
4952 public:
4953 pass_expand (gcc::context *ctxt)
4954 : rtl_opt_pass (pass_data_expand, ctxt)
4957 /* opt_pass methods: */
4958 unsigned int execute () { return gimple_expand_cfg (); }
4960 }; // class pass_expand
4962 } // anon namespace
4964 rtl_opt_pass *
4965 make_pass_expand (gcc::context *ctxt)
4967 return new pass_expand (ctxt);