PR middle-end/59175
[official-gcc.git] / gcc / cfgexpand.c

/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "bitmap.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "sbitmap.h"
#include "cfgloop.h"
#include "regs.h" /* For reg_renumber.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "asan.h"
#include "tree-ssa-address.h"
#include "recog.h"
#include "output.h"

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
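
/* For intuition: if optimization assigns two partitions of the same base
   variable A to different pseudos, the second set_rtl call finds
   DECL_RTL (A) already set to a different place and records the pc_rtx
   sentinel ("multiple places") instead; var-tracking sorts it out later.  */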

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-increasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
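
/* Illustrative numbers: with STARTING_FRAME_OFFSET == 8 and a 16-byte
   PREFERRED_STACK_BOUNDARY, expand_used_vars computes off == 8 and
   frame_phase == 8, so frame_offset + frame_phase is a multiple of 16
   right from the start.  */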

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we cannot satisfy given the expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
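
/* E.g. a LOCAL_DECL_ALIGNMENT of 32 bits is recorded in DECL_ALIGN and
   comes back as 4 bytes.  */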

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
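
/* A worked example of the rounding above: with FRAME_GROWS_DOWNWARD,
   frame_phase == 0, frame_offset == -20, size == 8 and align == 16,
   we get -20 - 8 == -28, then -28 & -16 == -32, so the variable lands
   at offset -32.  "x &= -align" rounds x down to a multiple of ALIGN,
   which is why ALIGN must be a power of two.  */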

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v =
        (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  bool (*visit)(gimple, tree, void *);

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = (size_t *)
                  pointer_map_contains (decl_to_stack_part, lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward, without also moving a dereference to
     it upwards.  But it's conservatively correct as a variable never can
     hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB (bb)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block);
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK (rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB (bb)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB (bb)
    BITMAP_FREE (bb->aux);
}
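
/* Note: visiting the blocks in reverse post-order (the RPO array above)
   makes this forward dataflow problem converge quickly; for a loop-free
   CFG a single sweep already reaches the fixed point.  */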

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
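
/* E.g. three small-aligned vars A (size 32, align 8), B (size 8, align 4)
   and C (size 32, align 16) sort as C, A, B: larger sizes come first, and
   equal sizes are ordered by decreasing alignment.  */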

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/
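
/* E.g. for objects of sizes 32, 16 and 8 where only the 32- and 16-byte
   objects conflict, the scan over the sorted list merges the 8-byte
   object into the 32-byte partition; the two then overlay the same slot,
   so the frame needs 48 bytes rather than 56.  */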

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((flag_sanitize & SANITIZE_ADDRESS) && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects; J joins I's partition and will share
             its slot.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
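      /* OFFSET & -OFFSET is the largest power of two that divides OFFSET
         (e.g. 48 & -48 == 16), i.e. the alignment the chosen offset
         actually guarantees relative to the base.  */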
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed order; highest-offset pairs come first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;
};

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          if ((flag_sanitize & SANITIZE_ADDRESS) && pred)
            {
              HOST_WIDE_INT prev_offset = frame_offset;
              tree repr_decl = NULL_TREE;

              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));
              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
            }
          else
            offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || (flag_sanitize & SANITIZE_ADDRESS))
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
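
/* Roughly: -fstack-protector and -fsanitize=address force deferral, as
   does "large" alignment; beyond that, toplevel variables below -O2 and
   small variables at -O0 are allocated immediately, since packing cannot
   pay off for them.  */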

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
            len = max;
          else
            len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
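
/* For example, with the default --param ssp-buffer-size=8, a local
   "char buf[16]" classifies as a large char array and lands in phase 1
   under every -fstack-protector mode, while under -fstack-protector-all
   an "int a[4]" (an array, but not a char array) falls into phase 2.  */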

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

/* And a helper function that checks for the asan phase (with stack
   protector it is phase 3).  This is used as a callback for
   expand_stack_vars.  Returns true if any of the vars in the partition
   need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = pointer_map_create ();

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}

/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}

/* Expand all variables used in the function.  */

static rtx
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  vec<tree> maybe_local_decls = vNULL;
  rtx var_end_seq = NULL_RTX;
  struct pointer_map_t *ssa_name_decls;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  ssa_name_decls = pointer_map_create ();
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      /* Assign decls to each SSA name partition, share decls for partitions
         we could have coalesced (those with the same type).  */
      if (SSA_NAME_VAR (var) == NULL_TREE)
        {
          void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
          if (!*slot)
            *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
          replace_ssa_name_symbol (var, (tree) *slot);
        }

      /* Always allocate space for partitions based on VAR_DECLs.  But for
         those based on PARM_DECLs or RESULT_DECLs and which matter for the
         debug info, there is no need to do so if optimization is disabled
         because all the SSA_NAMEs based on these DECLs have been coalesced
         into a single partition, which is thus assigned the canonical RTL
         location of the DECLs.  */
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize)
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }
  pointer_map_destroy (ssa_name_decls);

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    FOR_EACH_LOCAL_DECL (cfun, i, var)
      if (!is_global_var (var))
        {
          tree var_type = TREE_TYPE (var);
          /* Examine local referenced variables that have their addresses
             taken, contain an array, or are arrays.  */
          if (TREE_CODE (var) == VAR_DECL
              && (TREE_CODE (var_type) == ARRAY_TYPE
                  || TREE_ADDRESSABLE (var)
                  || (RECORD_OR_UNION_TYPE_P (var_type)
                      && record_or_union_type_has_array_p (var_type))))
            {
              gen_stack_protect_signal = true;
              break;
            }
        }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* Expand variables not associated with any block now.  Those created by
         the optimizers could be live anywhere in the function.  Those that
         could possibly have been scoped originally and detached from their
         block will have their allocation deferred so we coalesce them with
         others when optimization is enabled.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            maybe_local_decls.safe_push (var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
          || cfun->calls_alloca || has_protected_decls)
        create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls)
        create_stack_guard ();
      break;

    default:
      ;
    }

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      struct stack_vars_data data;

      data.asan_vec = vNULL;
      data.asan_decl_vec = vNULL;

      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1, &data);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2, &data);
        }

      if (flag_sanitize & SANITIZE_ADDRESS)
        /* Phase 3, any partitions that need asan protection
           in addition to phase 1 and 2.  */
        expand_stack_vars (asan_decl_phase_3, &data);

      if (!data.asan_vec.is_empty ())
        {
          HOST_WIDE_INT prev_offset = frame_offset;
          HOST_WIDE_INT offset
            = alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
                                       ASAN_RED_ZONE_SIZE);
          data.asan_vec.safe_push (prev_offset);
          data.asan_vec.safe_push (offset);

          var_end_seq
            = asan_emit_stack_protection (virtual_stack_vars_rtx,
                                          data.asan_vec.address (),
                                          data.asan_decl_vec.address (),
                                          data.asan_vec.length ());
        }

      expand_stack_vars (NULL, &data);

      data.asan_vec.release ();
      data.asan_decl_vec.release ();
    }

  fini_vars_expansion ();

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  maybe_local_decls.release ();

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }

  return var_end_seq;
}

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
1916 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1917 of a basic block where we just expanded the conditional at the end,
1918 possibly clean up the CFG and instruction sequence. LAST is the
1919 last instruction before the just emitted jump sequence. */
1921 static void
1922 maybe_cleanup_end_of_block (edge e, rtx last)
1924 /* Special case: when jumpif decides that the condition is
1925 trivial it emits an unconditional jump (and the necessary
1926 barrier). But we still have two edges, the fallthru one is
1927 wrong. purge_dead_edges would clean this up later. Unfortunately
1928 we have to insert insns (and split edges) before
1929 find_many_sub_basic_blocks and hence before purge_dead_edges.
1930 But splitting edges might create new blocks which depend on the
1931 fact that if there are two edges there's no barrier. So the
1932 barrier would get lost and verify_flow_info would ICE. Instead
1933 of auditing all edge splitters to care for the barrier (which
1934 normally isn't there in a cleaned CFG), fix it here. */
1935 if (BARRIER_P (get_last_insn ()))
1937 rtx insn;
1938 remove_edge (e);
1939 /* Now, we have a single successor block, if we have insns to
1940 insert on the remaining edge we potentially will insert
1941 it at the end of this block (if the dest block isn't feasible)
1942 in order to avoid splitting the edge. This insertion will take
1943 place in front of the last jump. But we might have emitted
1944 multiple jumps (conditional and one unconditional) to the
1945 same destination. Inserting in front of the last one then
1946 is a problem. See PR 40021. We fix this by deleting all
1947 jumps except the last unconditional one. */
1948 insn = PREV_INSN (get_last_insn ());
1949 /* Make sure we have an unconditional jump. Otherwise we're
1950 confused. */
1951 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1952 for (insn = PREV_INSN (insn); insn != last;)
1954 insn = PREV_INSN (insn);
1955 if (JUMP_P (NEXT_INSN (insn)))
1957 if (!any_condjump_p (NEXT_INSN (insn)))
1959 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1960 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1962 delete_insn (NEXT_INSN (insn));
1968 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1969 Returns a new basic block if we've terminated the current basic
1970 block and created a new one. */
1972 static basic_block
1973 expand_gimple_cond (basic_block bb, gimple stmt)
1975 basic_block new_bb, dest;
1976 edge new_edge;
1977 edge true_edge;
1978 edge false_edge;
1979 rtx last2, last;
1980 enum tree_code code;
1981 tree op0, op1;
1983 code = gimple_cond_code (stmt);
1984 op0 = gimple_cond_lhs (stmt);
1985 op1 = gimple_cond_rhs (stmt);
1986 /* We're sometimes presented with such code:
1987 D.123_1 = x < y;
1988 if (D.123_1 != 0)
1990 This would expand to two comparisons which then later might
1991 be cleaned up by combine. But some pattern matchers like if-conversion
1992 work better when there's only one compare, so make up for this
1993 here as special exception if TER would have made the same change. */
1994 if (SA.values
1995 && TREE_CODE (op0) == SSA_NAME
1996 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
1997 && TREE_CODE (op1) == INTEGER_CST
1998 && ((gimple_cond_code (stmt) == NE_EXPR
1999 && integer_zerop (op1))
2000 || (gimple_cond_code (stmt) == EQ_EXPR
2001 && integer_onep (op1)))
2002 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2004 gimple second = SSA_NAME_DEF_STMT (op0);
2005 if (gimple_code (second) == GIMPLE_ASSIGN)
2007 enum tree_code code2 = gimple_assign_rhs_code (second);
2008 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2010 code = code2;
2011 op0 = gimple_assign_rhs1 (second);
2012 op1 = gimple_assign_rhs2 (second);
2014 /* If jumps are cheap, turn some more codes into
2015 jumpy sequences. */
2016 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
2018 if ((code2 == BIT_AND_EXPR
2019 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2020 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2021 || code2 == TRUTH_AND_EXPR)
2023 code = TRUTH_ANDIF_EXPR;
2024 op0 = gimple_assign_rhs1 (second);
2025 op1 = gimple_assign_rhs2 (second);
2027 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2029 code = TRUTH_ORIF_EXPR;
2030 op0 = gimple_assign_rhs1 (second);
2031 op1 = gimple_assign_rhs2 (second);
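/* A hedged illustration of the "jumpy sequence" case above: with cheap
   branches, a 1-bit AND feeding the condition, e.g.

     tmp_2 = a_1 & b_1;
     if (tmp_2 != 0) goto <L1>; else goto <L2>;

   is expanded as if it were `if (a_1 && b_1)', i.e. as
   TRUTH_ANDIF_EXPR with short-circuit jumps, instead of materializing
   the AND result in a register and testing it.  */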
2037 last2 = last = get_last_insn ();
2039 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2040 set_curr_insn_location (gimple_location (stmt));
2042 /* These flags have no purpose in RTL land. */
2043 true_edge->flags &= ~EDGE_TRUE_VALUE;
2044 false_edge->flags &= ~EDGE_FALSE_VALUE;
2046 /* We can either have a pure conditional jump with one fallthru edge or
2047 a two-way jump that needs to be decomposed into two basic blocks. */
2048 if (false_edge->dest == bb->next_bb)
2050 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2051 true_edge->probability);
2052 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2053 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2054 set_curr_insn_location (true_edge->goto_locus);
2055 false_edge->flags |= EDGE_FALLTHRU;
2056 maybe_cleanup_end_of_block (false_edge, last);
2057 return NULL;
2059 if (true_edge->dest == bb->next_bb)
2061 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2062 false_edge->probability);
2063 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2064 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2065 set_curr_insn_location (false_edge->goto_locus);
2066 true_edge->flags |= EDGE_FALLTHRU;
2067 maybe_cleanup_end_of_block (true_edge, last);
2068 return NULL;
2071 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2072 true_edge->probability);
2073 last = get_last_insn ();
2074 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2075 set_curr_insn_location (false_edge->goto_locus);
2076 emit_jump (label_rtx_for_bb (false_edge->dest));
2078 BB_END (bb) = last;
2079 if (BARRIER_P (BB_END (bb)))
2080 BB_END (bb) = PREV_INSN (BB_END (bb));
2081 update_bb_for_insn (bb);
2083 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2084 dest = false_edge->dest;
2085 redirect_edge_succ (false_edge, new_bb);
2086 false_edge->flags |= EDGE_FALLTHRU;
2087 new_bb->count = false_edge->count;
2088 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2089 if (current_loops && bb->loop_father)
2090 add_bb_to_loop (new_bb, bb->loop_father);
2091 new_edge = make_edge (new_bb, dest, 0);
2092 new_edge->probability = REG_BR_PROB_BASE;
2093 new_edge->count = new_bb->count;
2094 if (BARRIER_P (BB_END (new_bb)))
2095 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2096 update_bb_for_insn (new_bb);
2098 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2100 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2102 set_curr_insn_location (true_edge->goto_locus);
2103 true_edge->goto_locus = curr_insn_location ();
2106 return new_bb;
2109 /* Mark all calls that can have a transaction restart. */
2111 static void
2112 mark_transaction_restart_calls (gimple stmt)
2114 struct tm_restart_node dummy;
2115 void **slot;
2117 if (!cfun->gimple_df->tm_restart)
2118 return;
2120 dummy.stmt = stmt;
2121 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2122 if (slot)
2124 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2125 tree list = n->label_or_list;
2126 rtx insn;
2128 for (insn = next_real_insn (get_last_insn ());
2129 !CALL_P (insn);
2130 insn = next_real_insn (insn))
2131 continue;
2133 if (TREE_CODE (list) == LABEL_DECL)
2134 add_reg_note (insn, REG_TM, label_rtx (list));
2135 else
2136 for (; list ; list = TREE_CHAIN (list))
2137 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2141 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2142 statement STMT. */
2144 static void
2145 expand_call_stmt (gimple stmt)
2147 tree exp, decl, lhs;
2148 bool builtin_p;
2149 size_t i;
2151 if (gimple_call_internal_p (stmt))
2153 expand_internal_call (stmt);
2154 return;
2157 decl = gimple_call_fndecl (stmt);
2158 builtin_p = decl && DECL_BUILT_IN (decl);
2160 /* Bind bounds call is expanded as assignment. */
2161 if (builtin_p
2162 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2163 && DECL_FUNCTION_CODE (decl) == BUILT_IN_CHKP_BIND_BOUNDS)
2165 expand_assignment (gimple_call_lhs (stmt),
2166 gimple_call_arg (stmt, 0), false);
2167 return;
2170 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2171 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2173 /* If this is not a builtin function, the function type through which the
2174 call is made may be different from the type of the function. */
2175 if (!builtin_p)
2176 CALL_EXPR_FN (exp)
2177 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2178 CALL_EXPR_FN (exp));
2180 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2181 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2183 for (i = 0; i < gimple_call_num_args (stmt); i++)
2185 tree arg = gimple_call_arg (stmt, i);
2186 gimple def;
2187 /* TER forwards addresses into arguments of builtin functions so we
2188 have a chance to infer more correct alignment information. See PR39954. */
2189 if (builtin_p
2190 && TREE_CODE (arg) == SSA_NAME
2191 && (def = get_gimple_for_ssa_name (arg))
2192 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2193 arg = gimple_assign_rhs1 (def);
2194 CALL_EXPR_ARG (exp, i) = arg;
2197 if (gimple_has_side_effects (stmt))
2198 TREE_SIDE_EFFECTS (exp) = 1;
2200 if (gimple_call_nothrow_p (stmt))
2201 TREE_NOTHROW (exp) = 1;
2203 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2204 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2205 if (decl
2206 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2207 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2208 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2209 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2210 else
2211 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2212 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2213 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2215 /* Ensure RTL is created for debug args. */
2216 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2218 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2219 unsigned int ix;
2220 tree dtemp;
2222 if (debug_args)
2223 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2225 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2226 expand_debug_expr (dtemp);
2230 lhs = gimple_call_lhs (stmt);
2231 if (lhs)
2232 expand_assignment (lhs, exp, false);
2233 else
2234 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2236 mark_transaction_restart_calls (stmt);
2240 /* Generate RTL for an asm statement (explicit assembler code).
2241 STRING is a STRING_CST node containing the assembler code text,
2242 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2243 insn is volatile; don't optimize it. */
2245 static void
2246 expand_asm_loc (tree string, int vol, location_t locus)
2248 rtx body;
2250 if (TREE_CODE (string) == ADDR_EXPR)
2251 string = TREE_OPERAND (string, 0);
2253 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2254 ggc_strdup (TREE_STRING_POINTER (string)),
2255 locus);
2257 MEM_VOLATILE_P (body) = vol;
2259 emit_insn (body);
2262 /* Return the number of times character C occurs in string S. */
2263 static int
2264 n_occurrences (int c, const char *s)
2266 int n = 0;
2267 while (*s)
2268 n += (*s++ == c);
2269 return n;
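/* For example, n_occurrences (',', "=r,m") is 1, so that output
   constraint string describes two alternatives.  */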
2272 /* A subroutine of expand_asm_operands. Check that all operands have
2273 the same number of alternatives. Return true if so. */
2275 static bool
2276 check_operand_nalternatives (tree outputs, tree inputs)
2278 if (outputs || inputs)
2280 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2281 int nalternatives
2282 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2283 tree next = inputs;
2285 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2287 error ("too many alternatives in %<asm%>");
2288 return false;
2291 tmp = outputs;
2292 while (tmp)
2294 const char *constraint
2295 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2297 if (n_occurrences (',', constraint) != nalternatives)
2299 error ("operand constraints for %<asm%> differ "
2300 "in number of alternatives");
2301 return false;
2304 if (TREE_CHAIN (tmp))
2305 tmp = TREE_CHAIN (tmp);
2306 else
2307 tmp = next, next = 0;
2311 return true;
2314 /* Check for overlap between registers marked in CLOBBERED_REGS and
2315 anything inappropriate in T. Emit an error and return true for
2316 a conflict, false for ok. */
2318 static bool
2319 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2321 /* Conflicts between asm-declared register variables and the clobber
2322 list are not allowed. */
2323 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2325 if (overlap)
2327 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2328 DECL_NAME (overlap));
2330 /* Reset registerness to avoid emitting multiple errors for a
2331 single variable. */
2332 DECL_REGISTER (overlap) = 0;
2333 return true;
2336 return false;
2339 /* Generate RTL for an asm statement with arguments.
2340 STRING is the instruction template.
2341 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2342 Each output or input has an expression in the TREE_VALUE and
2343 a tree list in TREE_PURPOSE which in turn contains a constraint
2344 name in TREE_VALUE (or NULL_TREE) and a constraint string
2345 in TREE_PURPOSE.
2346 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2347 that is clobbered by this insn.
2349 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2350 should be the fallthru basic block of the asm goto.
2352 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2353 Some elements of OUTPUTS may be replaced with trees representing temporary
2354 values. The caller should copy those temporary values to the originally
2355 specified lvalues.
2357 VOL nonzero means the insn is volatile; don't optimize it. */
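/* A hedged, source-level example of what these lists describe
   (identifiers are illustrative only):

     asm ("addl %2, %0"
          : "=r" (sum)        OUTPUTS: constraint string in TREE_PURPOSE
          : "0" (a), "r" (b)  INPUTS; "0" matches output operand 0
          : "cc");            CLOBBERS

   STRING is then "addl %2, %0", and after expansion operand 0 may be a
   temporary that the caller copies back into `sum'.  */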
2359 static void
2360 expand_asm_operands (tree string, tree outputs, tree inputs,
2361 tree clobbers, tree labels, basic_block fallthru_bb,
2362 int vol, location_t locus)
2364 rtvec argvec, constraintvec, labelvec;
2365 rtx body;
2366 int ninputs = list_length (inputs);
2367 int noutputs = list_length (outputs);
2368 int nlabels = list_length (labels);
2369 int ninout;
2370 int nclobbers;
2371 HARD_REG_SET clobbered_regs;
2372 int clobber_conflict_found = 0;
2373 tree tail;
2374 tree t;
2375 int i;
2376 /* Vector of RTX's of evaluated output operands. */
2377 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2378 int *inout_opnum = XALLOCAVEC (int, noutputs);
2379 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2380 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
2381 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2382 int old_generating_concat_p = generating_concat_p;
2383 rtx fallthru_label = NULL_RTX;
2385 /* An ASM with no outputs needs to be treated as volatile, for now. */
2386 if (noutputs == 0)
2387 vol = 1;
2389 if (! check_operand_nalternatives (outputs, inputs))
2390 return;
2392 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2394 /* Collect constraints. */
2395 i = 0;
2396 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2397 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2398 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2399 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2401 /* Sometimes we wish to automatically clobber registers across an asm.
2402 Case in point is when the i386 backend moved from cc0 to a hard reg --
2403 maintaining source-level compatibility means automatically clobbering
2404 the flags register. */
2405 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2407 /* Count the number of meaningful clobbered registers, ignoring what
2408 we would ignore later. */
2409 nclobbers = 0;
2410 CLEAR_HARD_REG_SET (clobbered_regs);
2411 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2413 const char *regname;
2414 int nregs;
2416 if (TREE_VALUE (tail) == error_mark_node)
2417 return;
2418 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2420 i = decode_reg_name_and_count (regname, &nregs);
2421 if (i == -4)
2422 ++nclobbers;
2423 else if (i == -2)
2424 error ("unknown register name %qs in %<asm%>", regname);
2426 /* Mark clobbered registers. */
2427 if (i >= 0)
2429 int reg;
2431 for (reg = i; reg < i + nregs; reg++)
2433 ++nclobbers;
2435 /* Clobbering the PIC register is an error. */
2436 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2438 error ("PIC register clobbered by %qs in %<asm%>", regname);
2439 return;
2442 SET_HARD_REG_BIT (clobbered_regs, reg);
2447 /* First pass over inputs and outputs checks validity and sets
2448 mark_addressable if needed. */
2450 ninout = 0;
2451 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2453 tree val = TREE_VALUE (tail);
2454 tree type = TREE_TYPE (val);
2455 const char *constraint;
2456 bool is_inout;
2457 bool allows_reg;
2458 bool allows_mem;
2460 /* If there's an erroneous arg, emit no insn. */
2461 if (type == error_mark_node)
2462 return;
2464 /* Try to parse the output constraint. If that fails, there's
2465 no point in going further. */
2466 constraint = constraints[i];
2467 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2468 &allows_mem, &allows_reg, &is_inout))
2469 return;
2471 if (! allows_reg
2472 && (allows_mem
2473 || is_inout
2474 || (DECL_P (val)
2475 && REG_P (DECL_RTL (val))
2476 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2477 mark_addressable (val);
2479 if (is_inout)
2480 ninout++;
2483 ninputs += ninout;
2484 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
2486 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2487 return;
2490 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2492 bool allows_reg, allows_mem;
2493 const char *constraint;
2495 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2496 would get VOIDmode and that could cause a crash in reload. */
2497 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2498 return;
2500 constraint = constraints[i + noutputs];
2501 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2502 constraints, &allows_mem, &allows_reg))
2503 return;
2505 if (! allows_reg && allows_mem)
2506 mark_addressable (TREE_VALUE (tail));
2509 /* Second pass evaluates arguments. */
2511 /* Make sure stack is consistent for asm goto. */
2512 if (nlabels > 0)
2513 do_pending_stack_adjust ();
2515 ninout = 0;
2516 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2518 tree val = TREE_VALUE (tail);
2519 tree type = TREE_TYPE (val);
2520 bool is_inout;
2521 bool allows_reg;
2522 bool allows_mem;
2523 rtx op;
2524 bool ok;
2526 ok = parse_output_constraint (&constraints[i], i, ninputs,
2527 noutputs, &allows_mem, &allows_reg,
2528 &is_inout);
2529 gcc_assert (ok);
2531 /* If an output operand is not a decl or indirect ref and our constraint
2532 allows a register, make a temporary to act as an intermediate.
2533 Make the asm insn write into that, then our caller will copy it to
2534 the real output operand. Likewise for promoted variables. */
2536 generating_concat_p = 0;
2538 real_output_rtx[i] = NULL_RTX;
2539 if ((TREE_CODE (val) == INDIRECT_REF
2540 && allows_mem)
2541 || (DECL_P (val)
2542 && (allows_mem || REG_P (DECL_RTL (val)))
2543 && ! (REG_P (DECL_RTL (val))
2544 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2545 || ! allows_reg
2546 || is_inout)
2548 op = expand_expr (val, NULL_RTX, VOIDmode,
2549 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2550 if (MEM_P (op))
2551 op = validize_mem (op);
2553 if (! allows_reg && !MEM_P (op))
2554 error ("output number %d not directly addressable", i);
2555 if ((! allows_mem && MEM_P (op))
2556 || GET_CODE (op) == CONCAT)
2558 real_output_rtx[i] = op;
2559 op = gen_reg_rtx (GET_MODE (op));
2560 if (is_inout)
2561 emit_move_insn (op, real_output_rtx[i]);
2564 else
2566 op = assign_temp (type, 0, 1);
2567 op = validize_mem (op);
2568 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2569 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2570 TREE_VALUE (tail) = make_tree (type, op);
2572 output_rtx[i] = op;
2574 generating_concat_p = old_generating_concat_p;
2576 if (is_inout)
2578 inout_mode[ninout] = TYPE_MODE (type);
2579 inout_opnum[ninout++] = i;
2582 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2583 clobber_conflict_found = 1;
2586 /* Make vectors for the expression-rtx, constraint strings,
2587 and named operands. */
2589 argvec = rtvec_alloc (ninputs);
2590 constraintvec = rtvec_alloc (ninputs);
2591 labelvec = rtvec_alloc (nlabels);
2593 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2594 : GET_MODE (output_rtx[0])),
2595 ggc_strdup (TREE_STRING_POINTER (string)),
2596 empty_string, 0, argvec, constraintvec,
2597 labelvec, locus);
2599 MEM_VOLATILE_P (body) = vol;
2601 /* Eval the inputs and put them into ARGVEC.
2602 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2604 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2606 bool allows_reg, allows_mem;
2607 const char *constraint;
2608 tree val, type;
2609 rtx op;
2610 bool ok;
2612 constraint = constraints[i + noutputs];
2613 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2614 constraints, &allows_mem, &allows_reg);
2615 gcc_assert (ok);
2617 generating_concat_p = 0;
2619 val = TREE_VALUE (tail);
2620 type = TREE_TYPE (val);
2621 /* EXPAND_INITIALIZER will not generate code for valid initializer
2622 constants, but will still generate code for other types of operand.
2623 This is the behavior we want for constant constraints. */
2624 op = expand_expr (val, NULL_RTX, VOIDmode,
2625 allows_reg ? EXPAND_NORMAL
2626 : allows_mem ? EXPAND_MEMORY
2627 : EXPAND_INITIALIZER);
2629 /* Never pass a CONCAT to an ASM. */
2630 if (GET_CODE (op) == CONCAT)
2631 op = force_reg (GET_MODE (op), op);
2632 else if (MEM_P (op))
2633 op = validize_mem (op);
2635 if (asm_operand_ok (op, constraint, NULL) <= 0)
2637 if (allows_reg && TYPE_MODE (type) != BLKmode)
2638 op = force_reg (TYPE_MODE (type), op);
2639 else if (!allows_mem)
2640 warning (0, "asm operand %d probably doesn%'t match constraints",
2641 i + noutputs);
2642 else if (MEM_P (op))
2644 /* We won't recognize either volatile memory or memory
2645 with a queued address as a valid memory_operand
2646 at this point. Ignore it: clearly this *is* memory. */
2648 else
2649 gcc_unreachable ();
2652 generating_concat_p = old_generating_concat_p;
2653 ASM_OPERANDS_INPUT (body, i) = op;
2655 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2656 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
2657 ggc_strdup (constraints[i + noutputs]));
2659 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2660 clobber_conflict_found = 1;
2663 /* Protect all the operands from the queue now that they have all been
2664 evaluated. */
2666 generating_concat_p = 0;
2668 /* For in-out operands, copy output rtx to input rtx. */
2669 for (i = 0; i < ninout; i++)
2671 int j = inout_opnum[i];
2672 char buffer[16];
2674 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2675 = output_rtx[j];
2677 sprintf (buffer, "%d", j);
2678 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2679 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
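/* For example, an in-out operand written as

     asm ("incl %0" : "+r" (counter));

   reaches this point as output 0 with constraint "=r" plus a hidden
   input whose constraint is the matching operand number "0", which is
   what ties the two to the same register.  */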
2682 /* Copy labels to the vector. */
2683 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2685 rtx r;
2686 /* If asm goto has any labels in the fallthru basic block, use
2687 a label that we emit immediately after the asm goto. Expansion
2688 may insert further instructions into the same basic block after
2689 asm goto and if we don't do this, insertion of instructions on
2690 the fallthru edge might misbehave. See PR58670. */
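/* Sketch (hypothetical label name): for

     asm goto ("jc %l0" : : : : error);

   where `error' starts the fallthru block, we branch to a fresh label
   emitted just after the asm goto instead of to `error' itself.  */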
2691 if (fallthru_bb
2692 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2694 if (fallthru_label == NULL_RTX)
2695 fallthru_label = gen_label_rtx ();
2696 r = fallthru_label;
2698 else
2699 r = label_rtx (TREE_VALUE (tail));
2700 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2703 generating_concat_p = old_generating_concat_p;
2705 /* Now, for each output, construct an rtx
2706 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2707 ARGVEC CONSTRAINTS OPNAMES))
2708 If there is more than one, put them inside a PARALLEL. */
2710 if (nlabels > 0 && nclobbers == 0)
2712 gcc_assert (noutputs == 0);
2713 emit_jump_insn (body);
2715 else if (noutputs == 0 && nclobbers == 0)
2717 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2718 emit_insn (body);
2720 else if (noutputs == 1 && nclobbers == 0)
2722 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2723 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2725 else
2727 rtx obody = body;
2728 int num = noutputs;
2730 if (num == 0)
2731 num = 1;
2733 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2735 /* For each output operand, store a SET. */
2736 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2738 XVECEXP (body, 0, i)
2739 = gen_rtx_SET (VOIDmode,
2740 output_rtx[i],
2741 gen_rtx_ASM_OPERANDS
2742 (GET_MODE (output_rtx[i]),
2743 ggc_strdup (TREE_STRING_POINTER (string)),
2744 ggc_strdup (constraints[i]),
2745 i, argvec, constraintvec, labelvec, locus));
2747 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2750 /* If there are no outputs (but there are some clobbers)
2751 store the bare ASM_OPERANDS into the PARALLEL. */
2753 if (i == 0)
2754 XVECEXP (body, 0, i++) = obody;
2756 /* Store (clobber REG) for each clobbered register specified. */
2758 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2760 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2761 int reg, nregs;
2762 int j = decode_reg_name_and_count (regname, &nregs);
2763 rtx clobbered_reg;
2765 if (j < 0)
2767 if (j == -3) /* `cc', which is not a register */
2768 continue;
2770 if (j == -4) /* `memory', don't cache memory across asm */
2772 XVECEXP (body, 0, i++)
2773 = gen_rtx_CLOBBER (VOIDmode,
2774 gen_rtx_MEM
2775 (BLKmode,
2776 gen_rtx_SCRATCH (VOIDmode)));
2777 continue;
2780 /* Ignore unknown register, error already signaled. */
2781 continue;
2784 for (reg = j; reg < j + nregs; reg++)
2786 /* Use QImode since that's guaranteed to clobber just
2787 * one reg. */
2788 clobbered_reg = gen_rtx_REG (QImode, reg);
2790 /* Sanity-check for any overlap between clobbers and the
2791 inputs and outputs that hasn't already been
2792 handled. Such overlap should have been detected and
2793 reported above. */
2794 if (!clobber_conflict_found)
2796 int opno;
2798 /* We test the old body (obody) contents to avoid
2799 tripping over the under-construction body. */
2800 for (opno = 0; opno < noutputs; opno++)
2801 if (reg_overlap_mentioned_p (clobbered_reg,
2802 output_rtx[opno]))
2803 internal_error
2804 ("asm clobber conflict with output operand");
2806 for (opno = 0; opno < ninputs - ninout; opno++)
2807 if (reg_overlap_mentioned_p (clobbered_reg,
2808 ASM_OPERANDS_INPUT (obody,
2809 opno)))
2810 internal_error
2811 ("asm clobber conflict with input operand");
2814 XVECEXP (body, 0, i++)
2815 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2819 if (nlabels > 0)
2820 emit_jump_insn (body);
2821 else
2822 emit_insn (body);
2825 if (fallthru_label)
2826 emit_label (fallthru_label);
2828 /* For any outputs that needed reloading into registers, spill them
2829 back to where they belong. */
2830 for (i = 0; i < noutputs; ++i)
2831 if (real_output_rtx[i])
2832 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2834 crtl->has_asm_statement = 1;
2835 free_temp_slots ();
2839 static void
2840 expand_asm_stmt (gimple stmt)
2842 int noutputs;
2843 tree outputs, tail, t;
2844 tree *o;
2845 size_t i, n;
2846 const char *s;
2847 tree str, out, in, cl, labels;
2848 location_t locus = gimple_location (stmt);
2849 basic_block fallthru_bb = NULL;
2851 /* Meh... convert the gimple asm operands into real tree lists.
2852 Eventually we should make all routines work on the vectors instead
2853 of relying on TREE_CHAIN. */
2854 out = NULL_TREE;
2855 n = gimple_asm_noutputs (stmt);
2856 if (n > 0)
2858 t = out = gimple_asm_output_op (stmt, 0);
2859 for (i = 1; i < n; i++)
2860 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
2863 in = NULL_TREE;
2864 n = gimple_asm_ninputs (stmt);
2865 if (n > 0)
2867 t = in = gimple_asm_input_op (stmt, 0);
2868 for (i = 1; i < n; i++)
2869 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
2872 cl = NULL_TREE;
2873 n = gimple_asm_nclobbers (stmt);
2874 if (n > 0)
2876 t = cl = gimple_asm_clobber_op (stmt, 0);
2877 for (i = 1; i < n; i++)
2878 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
2881 labels = NULL_TREE;
2882 n = gimple_asm_nlabels (stmt);
2883 if (n > 0)
2885 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
2886 if (fallthru)
2887 fallthru_bb = fallthru->dest;
2888 t = labels = gimple_asm_label_op (stmt, 0);
2889 for (i = 1; i < n; i++)
2890 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
2893 s = gimple_asm_string (stmt);
2894 str = build_string (strlen (s), s);
2896 if (gimple_asm_input_p (stmt))
2898 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
2899 return;
2902 outputs = out;
2903 noutputs = gimple_asm_noutputs (stmt);
2904 /* o[I] is the place that output number I should be written. */
2905 o = (tree *) alloca (noutputs * sizeof (tree));
2907 /* Record the contents of OUTPUTS before it is modified. */
2908 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2909 o[i] = TREE_VALUE (tail);
2911 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
2912 OUTPUTS some trees for where the values were actually stored. */
2913 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
2914 gimple_asm_volatile_p (stmt), locus);
2916 /* Copy all the intermediate outputs into the specified outputs. */
2917 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2919 if (o[i] != TREE_VALUE (tail))
2921 expand_assignment (o[i], TREE_VALUE (tail), false);
2922 free_temp_slots ();
2924 /* Restore the original value so that it's correct the next
2925 time we expand this function. */
2926 TREE_VALUE (tail) = o[i];
2931 /* Emit code to jump to the address
2932 specified by the pointer expression EXP. */
2934 static void
2935 expand_computed_goto (tree exp)
2937 rtx x = expand_normal (exp);
2939 x = convert_memory_address (Pmode, x);
2941 do_pending_stack_adjust ();
2942 emit_indirect_jump (x);
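/* This is the expander behind GNU C computed gotos, e.g.

     static void *dispatch[] = { &&op_add, &&op_sub };
     goto *dispatch[opcode];

   where the goto destination is a pointer expression rather than a
   LABEL_DECL (identifiers above are illustrative).  */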
2945 /* Generate RTL code for a `goto' statement with target label LABEL.
2946 LABEL should be a LABEL_DECL tree node that was or will later be
2947 defined with `expand_label'. */
2949 static void
2950 expand_goto (tree label)
2952 #ifdef ENABLE_CHECKING
2953 /* Check for a nonlocal goto to a containing function. Should have
2954 gotten translated to __builtin_nonlocal_goto. */
2955 tree context = decl_function_context (label);
2956 gcc_assert (!context || context == current_function_decl);
2957 #endif
2959 emit_jump (label_rtx (label));
2962 /* Output a return with no value. */
2964 static void
2965 expand_null_return_1 (void)
2967 clear_pending_stack_adjust ();
2968 do_pending_stack_adjust ();
2969 emit_jump (return_label);
2972 /* Generate RTL to return from the current function, with no value.
2973 (That is, we do not do anything about returning any value.) */
2975 void
2976 expand_null_return (void)
2978 /* If this function was declared to return a value, but we
2979 didn't, clobber the return registers so that they are not
2980 propagated live to the rest of the function. */
2981 clobber_return_register ();
2983 expand_null_return_1 ();
2986 /* Generate RTL to return from the current function, with value VAL. */
2988 static void
2989 expand_value_return (rtx val)
2991 /* Copy the value to the return location unless it's already there. */
2993 tree decl = DECL_RESULT (current_function_decl);
2994 rtx return_reg = DECL_RTL (decl);
2995 if (return_reg != val)
2997 tree funtype = TREE_TYPE (current_function_decl);
2998 tree type = TREE_TYPE (decl);
2999 int unsignedp = TYPE_UNSIGNED (type);
3000 enum machine_mode old_mode = DECL_MODE (decl);
3001 enum machine_mode mode;
3002 if (DECL_BY_REFERENCE (decl))
3003 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3004 else
3005 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3007 if (mode != old_mode)
3008 val = convert_modes (mode, old_mode, val, unsignedp);
3010 if (GET_CODE (return_reg) == PARALLEL)
3011 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3012 else
3013 emit_move_insn (return_reg, val);
3016 expand_null_return_1 ();
3019 /* Generate RTL to evaluate the expression RETVAL and return it
3020 from the current function. */
3022 static void
3023 expand_return (tree retval)
3025 rtx result_rtl;
3026 rtx val = 0;
3027 tree retval_rhs;
3029 /* If function wants no value, give it none. */
3030 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3032 expand_normal (retval);
3033 expand_null_return ();
3034 return;
3037 if (retval == error_mark_node)
3039 /* Treat this like a return of no value from a function that
3040 returns a value. */
3041 expand_null_return ();
3042 return;
3044 else if ((TREE_CODE (retval) == MODIFY_EXPR
3045 || TREE_CODE (retval) == INIT_EXPR)
3046 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3047 retval_rhs = TREE_OPERAND (retval, 1);
3048 else
3049 retval_rhs = retval;
3051 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3053 /* If we are returning the RESULT_DECL, then the value has already
3054 been stored into it, so we don't have to do anything special. */
3055 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3056 expand_value_return (result_rtl);
3058 /* If the result is an aggregate that is being returned in one (or more)
3059 registers, load the registers here. */
3061 else if (retval_rhs != 0
3062 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3063 && REG_P (result_rtl))
3065 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3066 if (val)
3068 /* Use the mode of the result value on the return register. */
3069 PUT_MODE (result_rtl, GET_MODE (val));
3070 expand_value_return (val);
3072 else
3073 expand_null_return ();
3075 else if (retval_rhs != 0
3076 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3077 && (REG_P (result_rtl)
3078 || (GET_CODE (result_rtl) == PARALLEL)))
3080 /* Calculate the return value into a temporary (usually a pseudo
3081 reg). */
3082 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3083 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3085 val = assign_temp (nt, 0, 1);
3086 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3087 val = force_not_mem (val);
3088 /* Return the calculated value. */
3089 expand_value_return (val);
3091 else
3093 /* No hard reg used; calculate value into hard return reg. */
3094 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3095 expand_value_return (result_rtl);
3099 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3100 STMT that doesn't require special handling for outgoing edges. That
3101 is, no tailcalls and no GIMPLE_COND. */
3103 static void
3104 expand_gimple_stmt_1 (gimple stmt)
3106 tree op0;
3108 set_curr_insn_location (gimple_location (stmt));
3110 switch (gimple_code (stmt))
3112 case GIMPLE_GOTO:
3113 op0 = gimple_goto_dest (stmt);
3114 if (TREE_CODE (op0) == LABEL_DECL)
3115 expand_goto (op0);
3116 else
3117 expand_computed_goto (op0);
3118 break;
3119 case GIMPLE_LABEL:
3120 expand_label (gimple_label_label (stmt));
3121 break;
3122 case GIMPLE_NOP:
3123 case GIMPLE_PREDICT:
3124 break;
3125 case GIMPLE_SWITCH:
3126 expand_case (stmt);
3127 break;
3128 case GIMPLE_ASM:
3129 expand_asm_stmt (stmt);
3130 break;
3131 case GIMPLE_CALL:
3132 expand_call_stmt (stmt);
3133 break;
3135 case GIMPLE_RETURN:
3136 op0 = gimple_return_retval (stmt);
3138 if (op0 && op0 != error_mark_node)
3140 tree result = DECL_RESULT (current_function_decl);
3142 /* If we are not returning the current function's RESULT_DECL,
3143 build an assignment to it. */
3144 if (op0 != result)
3146 /* I believe that a function's RESULT_DECL is unique. */
3147 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3149 /* ??? We'd like to use simply expand_assignment here,
3150 but this fails if the value is of BLKmode but the return
3151 decl is a register. expand_return has special handling
3152 for this combination, which eventually should move
3153 to common code. See comments there. Until then, let's
3154 build a modify expression :-/ */
3155 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3156 result, op0);
3159 if (!op0)
3160 expand_null_return ();
3161 else
3162 expand_return (op0);
3163 break;
3165 case GIMPLE_ASSIGN:
3167 tree lhs = gimple_assign_lhs (stmt);
3169 /* Tree expand used to fiddle with |= and &= of two bitfield
3170 COMPONENT_REFs here. This can't happen with gimple; the LHS
3171 of binary assigns must be a gimple reg. */
3173 if (TREE_CODE (lhs) != SSA_NAME
3174 || get_gimple_rhs_class (gimple_expr_code (stmt))
3175 == GIMPLE_SINGLE_RHS)
3177 tree rhs = gimple_assign_rhs1 (stmt);
3178 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3179 == GIMPLE_SINGLE_RHS);
3180 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3181 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3182 if (TREE_CLOBBER_P (rhs))
3183 /* This is a clobber marking that this LHS is going out
3184 of scope. */
3186 else
3187 expand_assignment (lhs, rhs,
3188 gimple_assign_nontemporal_move_p (stmt));
3190 else
3192 rtx target, temp;
3193 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
3194 struct separate_ops ops;
3195 bool promoted = false;
3197 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3198 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3199 promoted = true;
3201 ops.code = gimple_assign_rhs_code (stmt);
3202 ops.type = TREE_TYPE (lhs);
3203 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3205 case GIMPLE_TERNARY_RHS:
3206 ops.op2 = gimple_assign_rhs3 (stmt);
3207 /* Fallthru */
3208 case GIMPLE_BINARY_RHS:
3209 ops.op1 = gimple_assign_rhs2 (stmt);
3210 /* Fallthru */
3211 case GIMPLE_UNARY_RHS:
3212 ops.op0 = gimple_assign_rhs1 (stmt);
3213 break;
3214 default:
3215 gcc_unreachable ();
3217 ops.location = gimple_location (stmt);
3219 /* If we want to use a nontemporal store, force the value into a
3220 register first. If we store into a promoted register,
3221 don't directly expand to target. */
3222 temp = nontemporal || promoted ? NULL_RTX : target;
3223 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3224 EXPAND_NORMAL);
3226 if (temp == target)
3228 else if (promoted)
3230 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
3231 /* If TEMP is a VOIDmode constant, use convert_modes to make
3232 sure that we properly convert it. */
3233 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3235 temp = convert_modes (GET_MODE (target),
3236 TYPE_MODE (ops.type),
3237 temp, unsignedp);
3238 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3239 GET_MODE (target), temp, unsignedp);
3242 convert_move (SUBREG_REG (target), temp, unsignedp);
3244 else if (nontemporal && emit_storent_insn (target, temp))
3246 else
3248 temp = force_operand (temp, target);
3249 if (temp != target)
3250 emit_move_insn (target, temp);
3254 break;
3256 default:
3257 gcc_unreachable ();
3261 /* Expand one gimple statement STMT and return the last RTL instruction
3262 before any of the newly generated ones.
3264 In addition to generating the necessary RTL instructions this also
3265 sets REG_EH_REGION notes if necessary and sets the current source
3266 location for diagnostics. */
3268 static rtx
3269 expand_gimple_stmt (gimple stmt)
3271 location_t saved_location = input_location;
3272 rtx last = get_last_insn ();
3273 int lp_nr;
3275 gcc_assert (cfun);
3277 /* We need to save and restore the current source location so that errors
3278 discovered during expansion are emitted with the right location. But
3279 it would be better if the diagnostic routines used the source location
3280 embedded in the tree nodes rather than globals. */
3281 if (gimple_has_location (stmt))
3282 input_location = gimple_location (stmt);
3284 expand_gimple_stmt_1 (stmt);
3286 /* Free any temporaries used to evaluate this statement. */
3287 free_temp_slots ();
3289 input_location = saved_location;
3291 /* Mark all insns that may trap. */
3292 lp_nr = lookup_stmt_eh_lp (stmt);
3293 if (lp_nr)
3295 rtx insn;
3296 for (insn = next_real_insn (last); insn;
3297 insn = next_real_insn (insn))
3299 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3300 /* If we want exceptions for non-call insns, any
3301 may_trap_p instruction may throw. */
3302 && GET_CODE (PATTERN (insn)) != CLOBBER
3303 && GET_CODE (PATTERN (insn)) != USE
3304 && insn_could_throw_p (insn))
3305 make_reg_eh_region_note (insn, 0, lp_nr);
3309 return last;
3312 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3313 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3314 generated a tail call (something that might be denied by the ABI
3315 rules governing the call; see calls.c).
3317 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3318 can still reach the rest of BB. The case here is __builtin_sqrt,
3319 where the NaN result goes through the external function (with a
3320 tailcall) and the normal result happens via a sqrt instruction. */
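/* A rough pseudo-RTL sketch of that shape (editor's illustration, not
   generated output):

     y = sqrt-insn (x)        inline fast path
     if (y ordered y) goto L  non-NaN result: skip the library call
     sibcall sqrt (x)         NaN path: tail call into libm
   L:
     ...rest of BB...         still reachable, hence CAN_FALLTHRU  */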
3322 static basic_block
3323 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
3325 rtx last2, last;
3326 edge e;
3327 edge_iterator ei;
3328 int probability;
3329 gcov_type count;
3331 last2 = last = expand_gimple_stmt (stmt);
3333 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3334 if (CALL_P (last) && SIBLING_CALL_P (last))
3335 goto found;
3337 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3339 *can_fallthru = true;
3340 return NULL;
3342 found:
3343 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3344 Any instructions emitted here are about to be deleted. */
3345 do_pending_stack_adjust ();
3347 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3348 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3349 EH or abnormal edges, we shouldn't have created a tail call in
3350 the first place. So it seems to me we should just be removing
3351 all edges here, or redirecting the existing fallthru edge to
3352 the exit block. */
3354 probability = 0;
3355 count = 0;
3357 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3359 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3361 if (e->dest != EXIT_BLOCK_PTR)
3363 e->dest->count -= e->count;
3364 e->dest->frequency -= EDGE_FREQUENCY (e);
3365 if (e->dest->count < 0)
3366 e->dest->count = 0;
3367 if (e->dest->frequency < 0)
3368 e->dest->frequency = 0;
3370 count += e->count;
3371 probability += e->probability;
3372 remove_edge (e);
3374 else
3375 ei_next (&ei);
3378 /* This is somewhat ugly: the call_expr expander often emits instructions
3379 after the sibcall (to perform the function return). These confuse the
3380 find_many_sub_basic_blocks code, so we need to get rid of these. */
3381 last = NEXT_INSN (last);
3382 gcc_assert (BARRIER_P (last));
3384 *can_fallthru = false;
3385 while (NEXT_INSN (last))
3387 /* For instance, the sqrt builtin expander expands an if () with a
3388 sibcall in the then-branch and a label for the else-branch. */
3389 if (LABEL_P (NEXT_INSN (last)))
3391 *can_fallthru = true;
3392 break;
3394 delete_insn (NEXT_INSN (last));
3397 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
3398 e->probability += probability;
3399 e->count += count;
3400 BB_END (bb) = last;
3401 update_bb_for_insn (bb);
3403 if (NEXT_INSN (last))
3405 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3407 last = BB_END (bb);
3408 if (BARRIER_P (last))
3409 BB_END (bb) = PREV_INSN (last);
3412 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3414 return bb;
3417 /* Return the difference between the floor and the truncated result of
3418 a signed division by OP1 with remainder MOD. */
3419 static rtx
3420 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3422 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3423 return gen_rtx_IF_THEN_ELSE
3424 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3425 gen_rtx_IF_THEN_ELSE
3426 (mode, gen_rtx_LT (BImode,
3427 gen_rtx_DIV (mode, op1, mod),
3428 const0_rtx),
3429 constm1_rtx, const0_rtx),
3430 const0_rtx);
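/* Worked example: -7 / 2 truncates to -3 with MOD = -1.  MOD != 0 and
   OP1 / MOD = 2 / -1 < 0, so the expression above yields -1 and the
   floor result is -3 + -1 = -4, as expected.  */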
3433 /* Return the difference between the ceil and the truncated result of
3434 a signed division by OP1 with remainder MOD. */
3435 static rtx
3436 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3438 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3439 return gen_rtx_IF_THEN_ELSE
3440 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3441 gen_rtx_IF_THEN_ELSE
3442 (mode, gen_rtx_GT (BImode,
3443 gen_rtx_DIV (mode, op1, mod),
3444 const0_rtx),
3445 const1_rtx, const0_rtx),
3446 const0_rtx);
3449 /* Return the difference between the ceil and the truncated result of
3450 an unsigned division by OP1 with remainder MOD. */
3451 static rtx
3452 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3454 /* (mod != 0 ? 1 : 0) */
3455 return gen_rtx_IF_THEN_ELSE
3456 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3457 const1_rtx, const0_rtx);
3460 /* Return the difference between the rounded and the truncated result
3461 of a signed division by OP1 with remainder MOD. Halfway cases are
3462 rounded away from zero, rather than to the nearest even number. */
3463 static rtx
3464 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3466 /* (abs (mod) >= abs (op1) - abs (mod)
3467 ? (op1 / mod > 0 ? 1 : -1)
3468 : 0) */
3469 return gen_rtx_IF_THEN_ELSE
3470 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3471 gen_rtx_MINUS (mode,
3472 gen_rtx_ABS (mode, op1),
3473 gen_rtx_ABS (mode, mod))),
3474 gen_rtx_IF_THEN_ELSE
3475 (mode, gen_rtx_GT (BImode,
3476 gen_rtx_DIV (mode, op1, mod),
3477 const0_rtx),
3478 const1_rtx, constm1_rtx),
3479 const0_rtx);
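/* Worked example: 7 / 2 truncates to 3 with MOD = 1.  |MOD| >= |OP1| -
   |MOD| (1 >= 1, the halfway case) and OP1 / MOD = 2 > 0, so the
   adjustment is +1 and the rounded result is 4, away from zero.  */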
3482 /* Return the difference between the rounded and the truncated result
3483 of an unsigned division by OP1 with remainder MOD. Halfway cases
3484 are rounded away from zero, rather than to the nearest even
3485 number. */
3486 static rtx
3487 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3489 /* (mod >= op1 - mod ? 1 : 0) */
3490 return gen_rtx_IF_THEN_ELSE
3491 (mode, gen_rtx_GE (BImode, mod,
3492 gen_rtx_MINUS (mode, op1, mod)),
3493 const1_rtx, const0_rtx);
3496 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3497 any rtl. */
3499 static rtx
3500 convert_debug_memory_address (enum machine_mode mode, rtx x,
3501 addr_space_t as)
3503 enum machine_mode xmode = GET_MODE (x);
3505 #ifndef POINTERS_EXTEND_UNSIGNED
3506 gcc_assert (mode == Pmode
3507 || mode == targetm.addr_space.address_mode (as));
3508 gcc_assert (xmode == mode || xmode == VOIDmode);
3509 #else
3510 rtx temp;
3512 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3514 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3515 return x;
3517 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3518 x = simplify_gen_subreg (mode, x, xmode,
3519 subreg_lowpart_offset
3520 (mode, xmode));
3521 else if (POINTERS_EXTEND_UNSIGNED > 0)
3522 x = gen_rtx_ZERO_EXTEND (mode, x);
3523 else if (!POINTERS_EXTEND_UNSIGNED)
3524 x = gen_rtx_SIGN_EXTEND (mode, x);
3525 else
3527 switch (GET_CODE (x))
3529 case SUBREG:
3530 if ((SUBREG_PROMOTED_VAR_P (x)
3531 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3532 || (GET_CODE (SUBREG_REG (x)) == PLUS
3533 && REG_P (XEXP (SUBREG_REG (x), 0))
3534 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3535 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3536 && GET_MODE (SUBREG_REG (x)) == mode)
3537 return SUBREG_REG (x);
3538 break;
3539 case LABEL_REF:
3540 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
3541 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3542 return temp;
3543 case SYMBOL_REF:
3544 temp = shallow_copy_rtx (x);
3545 PUT_MODE (temp, mode);
3546 return temp;
3547 case CONST:
3548 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3549 if (temp)
3550 temp = gen_rtx_CONST (mode, temp);
3551 return temp;
3552 case PLUS:
3553 case MINUS:
3554 if (CONST_INT_P (XEXP (x, 1)))
3556 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3557 if (temp)
3558 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3560 break;
3561 default:
3562 break;
3564 /* Don't know how to express ptr_extend as an operation in debug info. */
3565 return NULL;
3567 #endif /* POINTERS_EXTEND_UNSIGNED */
3569 return x;
3572 /* Return an RTX equivalent to the value of the parameter DECL. */
3574 static rtx
3575 expand_debug_parm_decl (tree decl)
3577 rtx incoming = DECL_INCOMING_RTL (decl);
3579 if (incoming
3580 && GET_MODE (incoming) != BLKmode
3581 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3582 || (MEM_P (incoming)
3583 && REG_P (XEXP (incoming, 0))
3584 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3586 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3588 #ifdef HAVE_window_save
3589 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3590 If the target machine has an explicit window save instruction, the
3591 actual entry value is the corresponding OUTGOING_REGNO instead. */
3592 if (REG_P (incoming)
3593 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3594 incoming
3595 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3596 OUTGOING_REGNO (REGNO (incoming)), 0);
3597 else if (MEM_P (incoming))
3599 rtx reg = XEXP (incoming, 0);
3600 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3602 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3603 incoming = replace_equiv_address_nv (incoming, reg);
3605 else
3606 incoming = copy_rtx (incoming);
3608 #endif
3610 ENTRY_VALUE_EXP (rtl) = incoming;
3611 return rtl;
3614 if (incoming
3615 && GET_MODE (incoming) != BLKmode
3616 && !TREE_ADDRESSABLE (decl)
3617 && MEM_P (incoming)
3618 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3619 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3620 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3621 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3622 return copy_rtx (incoming);
3624 return NULL_RTX;
3627 /* Return an RTX equivalent to the value of the tree expression EXP. */
3629 static rtx
3630 expand_debug_expr (tree exp)
3632 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3633 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3634 enum machine_mode inner_mode = VOIDmode;
3635 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3636 addr_space_t as;
3638 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3640 case tcc_expression:
3641 switch (TREE_CODE (exp))
3643 case COND_EXPR:
3644 case DOT_PROD_EXPR:
3645 case WIDEN_MULT_PLUS_EXPR:
3646 case WIDEN_MULT_MINUS_EXPR:
3647 case FMA_EXPR:
3648 goto ternary;
3650 case TRUTH_ANDIF_EXPR:
3651 case TRUTH_ORIF_EXPR:
3652 case TRUTH_AND_EXPR:
3653 case TRUTH_OR_EXPR:
3654 case TRUTH_XOR_EXPR:
3655 goto binary;
3657 case TRUTH_NOT_EXPR:
3658 goto unary;
3660 default:
3661 break;
3663 break;
3665 ternary:
3666 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3667 if (!op2)
3668 return NULL_RTX;
3669 /* Fall through. */
3671 binary:
3672 case tcc_binary:
3673 case tcc_comparison:
3674 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3675 if (!op1)
3676 return NULL_RTX;
3677 /* Fall through. */
3679 unary:
3680 case tcc_unary:
3681 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3682 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3683 if (!op0)
3684 return NULL_RTX;
3685 break;
3687 case tcc_type:
3688 case tcc_statement:
3689 gcc_unreachable ();
3691 case tcc_constant:
3692 case tcc_exceptional:
3693 case tcc_declaration:
3694 case tcc_reference:
3695 case tcc_vl_exp:
3696 break;
3699 switch (TREE_CODE (exp))
3701 case STRING_CST:
3702 if (!lookup_constant_def (exp))
3704 if (strlen (TREE_STRING_POINTER (exp)) + 1
3705 != (size_t) TREE_STRING_LENGTH (exp))
3706 return NULL_RTX;
3707 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3708 op0 = gen_rtx_MEM (BLKmode, op0);
3709 set_mem_attributes (op0, exp, 0);
3710 return op0;
3712 /* Fall through... */
3714 case INTEGER_CST:
3715 case REAL_CST:
3716 case FIXED_CST:
3717 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3718 return op0;
3720 case COMPLEX_CST:
3721 gcc_assert (COMPLEX_MODE_P (mode));
3722 op0 = expand_debug_expr (TREE_REALPART (exp));
3723 op1 = expand_debug_expr (TREE_IMAGPART (exp));
3724 return gen_rtx_CONCAT (mode, op0, op1);
3726 case DEBUG_EXPR_DECL:
3727 op0 = DECL_RTL_IF_SET (exp);
3729 if (op0)
3730 return op0;
3732 op0 = gen_rtx_DEBUG_EXPR (mode);
3733 DEBUG_EXPR_TREE_DECL (op0) = exp;
3734 SET_DECL_RTL (exp, op0);
3736 return op0;
3738 case VAR_DECL:
3739 case PARM_DECL:
3740 case FUNCTION_DECL:
3741 case LABEL_DECL:
3742 case CONST_DECL:
3743 case RESULT_DECL:
3744 op0 = DECL_RTL_IF_SET (exp);
3746 /* This decl was probably optimized away. */
3747 if (!op0)
3749 if (TREE_CODE (exp) != VAR_DECL
3750 || DECL_EXTERNAL (exp)
3751 || !TREE_STATIC (exp)
3752 || !DECL_NAME (exp)
3753 || DECL_HARD_REGISTER (exp)
3754 || DECL_IN_CONSTANT_POOL (exp)
3755 || mode == VOIDmode)
3756 return NULL;
3758 op0 = make_decl_rtl_for_debug (exp);
3759 if (!MEM_P (op0)
3760 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
3761 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
3762 return NULL;
3764 else
3765 op0 = copy_rtx (op0);
3767 if (GET_MODE (op0) == BLKmode
3768 /* If op0 is not BLKmode but MODE is, adjust_mode
3769 below would ICE. While it is likely a FE bug,
3770 try to be robust here. See PR43166. */
3771 || mode == BLKmode
3772 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
3774 gcc_assert (MEM_P (op0));
3775 op0 = adjust_address_nv (op0, mode, 0);
3776 return op0;
3779 /* Fall through. */
3781 adjust_mode:
3782 case PAREN_EXPR:
3783 case NOP_EXPR:
3784 case CONVERT_EXPR:
3786 inner_mode = GET_MODE (op0);
3788 if (mode == inner_mode)
3789 return op0;
3791 if (inner_mode == VOIDmode)
3793 if (TREE_CODE (exp) == SSA_NAME)
3794 inner_mode = TYPE_MODE (TREE_TYPE (exp));
3795 else
3796 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3797 if (mode == inner_mode)
3798 return op0;
3801 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3803 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3804 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3805 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3806 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3807 else
3808 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3810 else if (FLOAT_MODE_P (mode))
3812 gcc_assert (TREE_CODE (exp) != SSA_NAME);
3813 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3814 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
3815 else
3816 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
3818 else if (FLOAT_MODE_P (inner_mode))
3820 if (unsignedp)
3821 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3822 else
3823 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3825 else if (CONSTANT_P (op0)
3826 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
3827 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3828 subreg_lowpart_offset (mode,
3829 inner_mode));
3830 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
3831 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
3832 : unsignedp)
3833 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3834 else
3835 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3837 return op0;
3840 case MEM_REF:
3841 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3843 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
3844 TREE_OPERAND (exp, 0),
3845 TREE_OPERAND (exp, 1));
3846 if (newexp)
3847 return expand_debug_expr (newexp);
3849 /* FALLTHROUGH */
3850 case INDIRECT_REF:
3851 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3852 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3853 if (!op0)
3854 return NULL;
3856 if (TREE_CODE (exp) == MEM_REF)
3858 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3859 || (GET_CODE (op0) == PLUS
3860 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
3861 /* (mem (debug_implicit_ptr)) might confuse aliasing.
3862 Instead just use get_inner_reference. */
3863 goto component_ref;
3865 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3866 if (!op1 || !CONST_INT_P (op1))
3867 return NULL;
3869 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
3872 if (POINTER_TYPE_P (TREE_TYPE (exp)))
3873 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
3874 else
3875 as = ADDR_SPACE_GENERIC;
3877 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3878 op0, as);
3879 if (op0 == NULL_RTX)
3880 return NULL;
3882 op0 = gen_rtx_MEM (mode, op0);
3883 set_mem_attributes (op0, exp, 0);
3884 if (TREE_CODE (exp) == MEM_REF
3885 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3886 set_mem_expr (op0, NULL_TREE);
3887 set_mem_addr_space (op0, as);
3889 return op0;
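/* Illustrative sketch, assuming p_1 was expanded to (reg:DI 70): a
   MEM_REF such as MEM[p_1 + 4B] would come out roughly as
     (mem:SI (plus:DI (reg:DI 70) (const_int 4)))
   with memory attributes taken from EXP; the exact address form also
   depends on the address-space conversion above.  */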
3891 case TARGET_MEM_REF:
3892 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
3893 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
3894 return NULL;
3896 op0 = expand_debug_expr
3897 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
3898 if (!op0)
3899 return NULL;
3901 if (POINTER_TYPE_P (TREE_TYPE (exp)))
3902 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
3903 else
3904 as = ADDR_SPACE_GENERIC;
3906 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3907 op0, as);
3908 if (op0 == NULL_RTX)
3909 return NULL;
3911 op0 = gen_rtx_MEM (mode, op0);
3913 set_mem_attributes (op0, exp, 0);
3914 set_mem_addr_space (op0, as);
3916 return op0;
3918 component_ref:
3919 case ARRAY_REF:
3920 case ARRAY_RANGE_REF:
3921 case COMPONENT_REF:
3922 case BIT_FIELD_REF:
3923 case REALPART_EXPR:
3924 case IMAGPART_EXPR:
3925 case VIEW_CONVERT_EXPR:
3927 enum machine_mode mode1;
3928 HOST_WIDE_INT bitsize, bitpos;
3929 tree offset;
3930 int volatilep = 0;
3931 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3932 &mode1, &unsignedp, &volatilep, false);
3933 rtx orig_op0;
3935 if (bitsize == 0)
3936 return NULL;
3938 orig_op0 = op0 = expand_debug_expr (tem);
3940 if (!op0)
3941 return NULL;
3943 if (offset)
3945 enum machine_mode addrmode, offmode;
3947 if (!MEM_P (op0))
3948 return NULL;
3950 op0 = XEXP (op0, 0);
3951 addrmode = GET_MODE (op0);
3952 if (addrmode == VOIDmode)
3953 addrmode = Pmode;
3955 op1 = expand_debug_expr (offset);
3956 if (!op1)
3957 return NULL;
3959 offmode = GET_MODE (op1);
3960 if (offmode == VOIDmode)
3961 offmode = TYPE_MODE (TREE_TYPE (offset));
3963 if (addrmode != offmode)
3964 op1 = simplify_gen_subreg (addrmode, op1, offmode,
3965 subreg_lowpart_offset (addrmode,
3966 offmode));
3968 /* Don't use offset_address here; we don't need a
3969 recognizable address, and we don't want to generate
3970 code. */
3971 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
3972 op0, op1));
3975 if (MEM_P (op0))
3977 if (mode1 == VOIDmode)
3978 /* Bitfield. */
3979 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
3980 if (bitpos >= BITS_PER_UNIT)
3982 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
3983 bitpos %= BITS_PER_UNIT;
3985 else if (bitpos < 0)
3987 HOST_WIDE_INT units
3988 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
3989 op0 = adjust_address_nv (op0, mode1, units);
3990 bitpos += units * BITS_PER_UNIT;
3992 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
3993 op0 = adjust_address_nv (op0, mode, 0);
3994 else if (GET_MODE (op0) != mode1)
3995 op0 = adjust_address_nv (op0, mode1, 0);
3996 else
3997 op0 = copy_rtx (op0);
3998 if (op0 == orig_op0)
3999 op0 = shallow_copy_rtx (op0);
4000 set_mem_attributes (op0, exp, 0);
4003 if (bitpos == 0 && mode == GET_MODE (op0))
4004 return op0;
4006 if (bitpos < 0)
4007 return NULL;
4009 if (GET_MODE (op0) == BLKmode)
4010 return NULL;
4012 if ((bitpos % BITS_PER_UNIT) == 0
4013 && bitsize == GET_MODE_BITSIZE (mode1))
4015 enum machine_mode opmode = GET_MODE (op0);
4017 if (opmode == VOIDmode)
4018 opmode = TYPE_MODE (TREE_TYPE (tem));
4020 /* This condition may hold if we're expanding the address
4021 right past the end of an array that turned out not to
4022 be addressable (i.e., the address was only computed in
4023 debug stmts). The gen_subreg below would rightfully
4024 crash, and the address doesn't really exist, so just
4025 drop it. */
4026 if (bitpos >= GET_MODE_BITSIZE (opmode))
4027 return NULL;
4029 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4030 return simplify_gen_subreg (mode, op0, opmode,
4031 bitpos / BITS_PER_UNIT);
4034 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4035 && TYPE_UNSIGNED (TREE_TYPE (exp))
4036 ? SIGN_EXTRACT
4037 : ZERO_EXTRACT, mode,
4038 GET_MODE (op0) != VOIDmode
4039 ? GET_MODE (op0)
4040 : TYPE_MODE (TREE_TYPE (tem)),
4041 op0, GEN_INT (bitsize), GEN_INT (bitpos));
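/* For example (illustrative): extracting BITSIZE 3 at BITPOS 2 from
   (reg:SI 70) produces an rtx of the shape
     (zero_extract:SI (reg:SI 70) (const_int 3) (const_int 2))
   (or a sign_extract, per the selection above).  */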
4044 case ABS_EXPR:
4045 return simplify_gen_unary (ABS, mode, op0, mode);
4047 case NEGATE_EXPR:
4048 return simplify_gen_unary (NEG, mode, op0, mode);
4050 case BIT_NOT_EXPR:
4051 return simplify_gen_unary (NOT, mode, op0, mode);
4053 case FLOAT_EXPR:
4054 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4055 0)))
4056 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4057 inner_mode);
4059 case FIX_TRUNC_EXPR:
4060 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4061 inner_mode);
4063 case POINTER_PLUS_EXPR:
4064 /* For the rare target where pointers are not the same size as
4065 size_t, we need to check for mismatched modes and correct
4066 the addend. */
4067 if (op0 && op1
4068 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4069 && GET_MODE (op0) != GET_MODE (op1))
4071 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4072 /* If OP0 is a partial mode, then we must truncate, even if it has
4073 the same bitsize as OP1 as GCC's representation of partial modes
4074 is opaque. */
4075 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4076 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4077 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4078 GET_MODE (op1));
4079 else
4080 /* We always sign-extend, regardless of the signedness of
4081 the operand, because the operand is always unsigned
4082 here even if the original C expression is signed. */
4083 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4084 GET_MODE (op1));
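/* Hypothetical example: on a target with 32-bit pointers (SImode OP0)
   and a narrower 16-bit sizetype (HImode OP1), the addend is widened
   first so that the PLUS generated below is well-formed:
     (plus:SI (reg:SI 70) (sign_extend:SI (reg:HI 71)))  */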
4086 /* Fall through. */
4087 case PLUS_EXPR:
4088 return simplify_gen_binary (PLUS, mode, op0, op1);
4090 case MINUS_EXPR:
4091 return simplify_gen_binary (MINUS, mode, op0, op1);
4093 case MULT_EXPR:
4094 return simplify_gen_binary (MULT, mode, op0, op1);
4096 case RDIV_EXPR:
4097 case TRUNC_DIV_EXPR:
4098 case EXACT_DIV_EXPR:
4099 if (unsignedp)
4100 return simplify_gen_binary (UDIV, mode, op0, op1);
4101 else
4102 return simplify_gen_binary (DIV, mode, op0, op1);
4104 case TRUNC_MOD_EXPR:
4105 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4107 case FLOOR_DIV_EXPR:
4108 if (unsignedp)
4109 return simplify_gen_binary (UDIV, mode, op0, op1);
4110 else
4112 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4113 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4114 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4115 return simplify_gen_binary (PLUS, mode, div, adj);
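/* Worked example (illustrative): for -7 floor-divided by 2, DIV
   truncates to -3 and MOD yields -1; floor_sdiv_adjust contributes -1
   because the remainder is nonzero and of opposite sign to OP1, so the
   result is -3 + -1 = -4 = floor(-7/2).  */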
4118 case FLOOR_MOD_EXPR:
4119 if (unsignedp)
4120 return simplify_gen_binary (UMOD, mode, op0, op1);
4121 else
4123 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4124 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4125 adj = simplify_gen_unary (NEG, mode,
4126 simplify_gen_binary (MULT, mode, adj, op1),
4127 mode);
4128 return simplify_gen_binary (PLUS, mode, mod, adj);
4131 case CEIL_DIV_EXPR:
4132 if (unsignedp)
4134 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4135 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4136 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4137 return simplify_gen_binary (PLUS, mode, div, adj);
4139 else
4141 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4142 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4143 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4144 return simplify_gen_binary (PLUS, mode, div, adj);
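/* E.g. (illustrative): 7 ceil-divided by 2 gives DIV = 3 and MOD = 1;
   ceil_sdiv_adjust contributes 1, so the result is 3 + 1 = 4
   = ceil(7/2).  */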
4147 case CEIL_MOD_EXPR:
4148 if (unsignedp)
4150 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4151 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4152 adj = simplify_gen_unary (NEG, mode,
4153 simplify_gen_binary (MULT, mode, adj, op1),
4154 mode);
4155 return simplify_gen_binary (PLUS, mode, mod, adj);
4157 else
4159 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4160 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4161 adj = simplify_gen_unary (NEG, mode,
4162 simplify_gen_binary (MULT, mode, adj, op1),
4163 mode);
4164 return simplify_gen_binary (PLUS, mode, mod, adj);
4167 case ROUND_DIV_EXPR:
4168 if (unsignedp)
4170 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4171 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4172 rtx adj = round_udiv_adjust (mode, mod, op1);
4173 return simplify_gen_binary (PLUS, mode, div, adj);
4175 else
4177 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4178 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4179 rtx adj = round_sdiv_adjust (mode, mod, op1);
4180 return simplify_gen_binary (PLUS, mode, div, adj);
4183 case ROUND_MOD_EXPR:
4184 if (unsignedp)
4186 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4187 rtx adj = round_udiv_adjust (mode, mod, op1);
4188 adj = simplify_gen_unary (NEG, mode,
4189 simplify_gen_binary (MULT, mode, adj, op1),
4190 mode);
4191 return simplify_gen_binary (PLUS, mode, mod, adj);
4193 else
4195 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4196 rtx adj = round_sdiv_adjust (mode, mod, op1);
4197 adj = simplify_gen_unary (NEG, mode,
4198 simplify_gen_binary (MULT, mode, adj, op1),
4199 mode);
4200 return simplify_gen_binary (PLUS, mode, mod, adj);
4203 case LSHIFT_EXPR:
4204 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4206 case RSHIFT_EXPR:
4207 if (unsignedp)
4208 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4209 else
4210 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4212 case LROTATE_EXPR:
4213 return simplify_gen_binary (ROTATE, mode, op0, op1);
4215 case RROTATE_EXPR:
4216 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4218 case MIN_EXPR:
4219 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4221 case MAX_EXPR:
4222 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4224 case BIT_AND_EXPR:
4225 case TRUTH_AND_EXPR:
4226 return simplify_gen_binary (AND, mode, op0, op1);
4228 case BIT_IOR_EXPR:
4229 case TRUTH_OR_EXPR:
4230 return simplify_gen_binary (IOR, mode, op0, op1);
4232 case BIT_XOR_EXPR:
4233 case TRUTH_XOR_EXPR:
4234 return simplify_gen_binary (XOR, mode, op0, op1);
4236 case TRUTH_ANDIF_EXPR:
4237 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4239 case TRUTH_ORIF_EXPR:
4240 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4242 case TRUTH_NOT_EXPR:
4243 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4245 case LT_EXPR:
4246 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4247 op0, op1);
4249 case LE_EXPR:
4250 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4251 op0, op1);
4253 case GT_EXPR:
4254 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4255 op0, op1);
4257 case GE_EXPR:
4258 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4259 op0, op1);
4261 case EQ_EXPR:
4262 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4264 case NE_EXPR:
4265 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4267 case UNORDERED_EXPR:
4268 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4270 case ORDERED_EXPR:
4271 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4273 case UNLT_EXPR:
4274 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4276 case UNLE_EXPR:
4277 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4279 case UNGT_EXPR:
4280 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4282 case UNGE_EXPR:
4283 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4285 case UNEQ_EXPR:
4286 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4288 case LTGT_EXPR:
4289 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4291 case COND_EXPR:
4292 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4294 case COMPLEX_EXPR:
4295 gcc_assert (COMPLEX_MODE_P (mode));
4296 if (GET_MODE (op0) == VOIDmode)
4297 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4298 if (GET_MODE (op1) == VOIDmode)
4299 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4300 return gen_rtx_CONCAT (mode, op0, op1);
4302 case CONJ_EXPR:
4303 if (GET_CODE (op0) == CONCAT)
4304 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4305 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4306 XEXP (op0, 1),
4307 GET_MODE_INNER (mode)));
4308 else
4310 enum machine_mode imode = GET_MODE_INNER (mode);
4311 rtx re, im;
4313 if (MEM_P (op0))
4315 re = adjust_address_nv (op0, imode, 0);
4316 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4318 else
4320 enum machine_mode ifmode = int_mode_for_mode (mode);
4321 enum machine_mode ihmode = int_mode_for_mode (imode);
4322 rtx halfsize;
4323 if (ifmode == BLKmode || ihmode == BLKmode)
4324 return NULL;
4325 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4326 re = op0;
4327 if (mode != ifmode)
4328 re = gen_rtx_SUBREG (ifmode, re, 0);
4329 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4330 if (imode != ihmode)
4331 re = gen_rtx_SUBREG (imode, re, 0);
4332 im = copy_rtx (op0);
4333 if (mode != ifmode)
4334 im = gen_rtx_SUBREG (ifmode, im, 0);
4335 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4336 if (imode != ihmode)
4337 im = gen_rtx_SUBREG (imode, im, 0);
4339 im = gen_rtx_NEG (imode, im);
4340 return gen_rtx_CONCAT (mode, re, im);
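/* Sketch of the non-MEM case above (hypothetical modes): for a DCmode
   value, each half is read through the integral modes ifmode/ihmode
   with a zero_extract at bit 0 for the real part and at the halfway
   bit position for the imaginary part; the result is then
     (concat:DC re (neg:DF im)).  */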
4343 case ADDR_EXPR:
4344 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4345 if (!op0 || !MEM_P (op0))
4347 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4348 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4349 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4350 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4351 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4352 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4354 if (handled_component_p (TREE_OPERAND (exp, 0)))
4356 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4357 tree decl
4358 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4359 &bitoffset, &bitsize, &maxsize);
4360 if ((TREE_CODE (decl) == VAR_DECL
4361 || TREE_CODE (decl) == PARM_DECL
4362 || TREE_CODE (decl) == RESULT_DECL)
4363 && (!TREE_ADDRESSABLE (decl)
4364 || target_for_debug_bind (decl))
4365 && (bitoffset % BITS_PER_UNIT) == 0
4366 && bitsize > 0
4367 && bitsize == maxsize)
4369 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4370 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
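/* For instance (illustrative): &s.f, where s was not allocated because
   its address is only computed in debug stmts and f sits at byte
   offset 8, becomes
     (plus (debug_implicit_ptr s) (const_int 8))
   so a location can still be described for it.  */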
4374 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4375 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4376 == ADDR_EXPR)
4378 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4379 0));
4380 if (op0 != NULL
4381 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4382 || (GET_CODE (op0) == PLUS
4383 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4384 && CONST_INT_P (XEXP (op0, 1)))))
4386 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4387 1));
4388 if (!op1 || !CONST_INT_P (op1))
4389 return NULL;
4391 return plus_constant (mode, op0, INTVAL (op1));
4395 return NULL;
4398 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
4399 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4401 return op0;
4403 case VECTOR_CST:
4405 unsigned i;
4407 op0 = gen_rtx_CONCATN
4408 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4410 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4412 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4413 if (!op1)
4414 return NULL;
4415 XVECEXP (op0, 0, i) = op1;
4418 return op0;
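/* Illustrative: a V4SI constant vector {1, 2, 3, 4} is represented for
   debug purposes as
     (concatn:V4SI [(const_int 1) (const_int 2) (const_int 3) (const_int 4)])
   with one element expanded per iteration above.  */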
4421 case CONSTRUCTOR:
4422 if (TREE_CLOBBER_P (exp))
4423 return NULL;
4424 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4426 unsigned i;
4427 tree val;
4429 op0 = gen_rtx_CONCATN
4430 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4432 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4434 op1 = expand_debug_expr (val);
4435 if (!op1)
4436 return NULL;
4437 XVECEXP (op0, 0, i) = op1;
4440 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4442 op1 = expand_debug_expr
4443 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4445 if (!op1)
4446 return NULL;
4448 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4449 XVECEXP (op0, 0, i) = op1;
4452 return op0;
4454 else
4455 goto flag_unsupported;
4457 case CALL_EXPR:
4458 /* ??? Maybe handle some builtins? */
4459 return NULL;
4461 case SSA_NAME:
4463 gimple g = get_gimple_for_ssa_name (exp);
4464 if (g)
4466 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
4467 if (!op0)
4468 return NULL;
4470 else
4472 int part = var_to_partition (SA.map, exp);
4474 if (part == NO_PARTITION)
4476 /* If this is a reference to an incoming value of a parameter
4477 that is never used in the code, use the PARM_DECL's
4478 DECL_RTL if set. */
4480 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4481 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4483 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4484 if (op0)
4485 goto adjust_mode;
4486 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4487 if (op0)
4488 goto adjust_mode;
4490 return NULL;
4493 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4495 op0 = copy_rtx (SA.partition_to_pseudo[part]);
4497 goto adjust_mode;
4500 case ERROR_MARK:
4501 return NULL;
4503 /* Vector stuff. For most of these tree codes we don't have rtl codes. */
4504 case REALIGN_LOAD_EXPR:
4505 case REDUC_MAX_EXPR:
4506 case REDUC_MIN_EXPR:
4507 case REDUC_PLUS_EXPR:
4508 case VEC_COND_EXPR:
4509 case VEC_LSHIFT_EXPR:
4510 case VEC_PACK_FIX_TRUNC_EXPR:
4511 case VEC_PACK_SAT_EXPR:
4512 case VEC_PACK_TRUNC_EXPR:
4513 case VEC_RSHIFT_EXPR:
4514 case VEC_UNPACK_FLOAT_HI_EXPR:
4515 case VEC_UNPACK_FLOAT_LO_EXPR:
4516 case VEC_UNPACK_HI_EXPR:
4517 case VEC_UNPACK_LO_EXPR:
4518 case VEC_WIDEN_MULT_HI_EXPR:
4519 case VEC_WIDEN_MULT_LO_EXPR:
4520 case VEC_WIDEN_MULT_EVEN_EXPR:
4521 case VEC_WIDEN_MULT_ODD_EXPR:
4522 case VEC_WIDEN_LSHIFT_HI_EXPR:
4523 case VEC_WIDEN_LSHIFT_LO_EXPR:
4524 case VEC_PERM_EXPR:
4525 return NULL;
4527 /* Misc codes. */
4528 case ADDR_SPACE_CONVERT_EXPR:
4529 case FIXED_CONVERT_EXPR:
4530 case OBJ_TYPE_REF:
4531 case WITH_SIZE_EXPR:
4532 return NULL;
4534 case DOT_PROD_EXPR:
4535 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4536 && SCALAR_INT_MODE_P (mode))
4539 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4540 0)))
4541 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4542 inner_mode);
4544 op1 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4545 1)))
4546 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4547 inner_mode);
4548 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4549 return simplify_gen_binary (PLUS, mode, op0, op2);
4551 return NULL;
4553 case WIDEN_MULT_EXPR:
4554 case WIDEN_MULT_PLUS_EXPR:
4555 case WIDEN_MULT_MINUS_EXPR:
4556 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4557 && SCALAR_INT_MODE_P (mode))
4559 inner_mode = GET_MODE (op0);
4560 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4561 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4562 else
4563 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4564 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4565 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4566 else
4567 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4568 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4569 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4570 return op0;
4571 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4572 return simplify_gen_binary (PLUS, mode, op0, op2);
4573 else
4574 return simplify_gen_binary (MINUS, mode, op2, op0);
4576 return NULL;
4578 case MULT_HIGHPART_EXPR:
4579 /* ??? Similar to the above. */
4580 return NULL;
4582 case WIDEN_SUM_EXPR:
4583 case WIDEN_LSHIFT_EXPR:
4584 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4585 && SCALAR_INT_MODE_P (mode))
4588 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4589 0)))
4590 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4591 inner_mode);
4592 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4593 ? ASHIFT : PLUS, mode, op0, op1);
4595 return NULL;
4597 case FMA_EXPR:
4598 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4600 default:
4601 flag_unsupported:
4602 #ifdef ENABLE_CHECKING
4603 debug_tree (exp);
4604 gcc_unreachable ();
4605 #else
4606 return NULL;
4607 #endif
4611 /* Return an RTX equivalent to the source bind value of the tree expression
4612 EXP. */
4614 static rtx
4615 expand_debug_source_expr (tree exp)
4617 rtx op0 = NULL_RTX;
4618 enum machine_mode mode = VOIDmode, inner_mode;
4620 switch (TREE_CODE (exp))
4622 case PARM_DECL:
4624 mode = DECL_MODE (exp);
4625 op0 = expand_debug_parm_decl (exp);
4626 if (op0)
4627 break;
4628 /* Check whether this is an argument that has been completely
4629 optimized out. */
4630 if (!DECL_RTL_SET_P (exp)
4631 && !DECL_INCOMING_RTL (exp)
4632 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4634 tree aexp = DECL_ORIGIN (exp);
4635 if (DECL_CONTEXT (aexp)
4636 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4638 vec<tree, va_gc> **debug_args;
4639 unsigned int ix;
4640 tree ddecl;
4641 debug_args = decl_debug_args_lookup (current_function_decl);
4642 if (debug_args != NULL)
4644 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
4645 ix += 2)
4646 if (ddecl == aexp)
4647 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4651 break;
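/* Illustrative: if this function is a clone and the parameter was
   optimized away, the lookup above can still map its abstract origin to
   a (debug_parameter_ref:SI <parm>) rtx, from which the debug info
   machinery can later describe the parameter's incoming value.  */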
4653 default:
4654 break;
4657 if (op0 == NULL_RTX)
4658 return NULL_RTX;
4660 inner_mode = GET_MODE (op0);
4661 if (mode == inner_mode)
4662 return op0;
4664 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4666 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4667 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4668 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4669 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4670 else
4671 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4673 else if (FLOAT_MODE_P (mode))
4674 gcc_unreachable ();
4675 else if (FLOAT_MODE_P (inner_mode))
4677 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4678 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4679 else
4680 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4682 else if (CONSTANT_P (op0)
4683 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4684 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4685 subreg_lowpart_offset (mode, inner_mode));
4686 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4687 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4688 else
4689 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4691 return op0;
4694 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
4695 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4696 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
4698 static void
4699 avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
4701 rtx exp = *exp_p;
4703 if (exp == NULL_RTX)
4704 return;
4706 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4707 return;
4709 if (depth == 4)
4711 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4712 rtx dval = make_debug_expr_from_rtl (exp);
4714 /* Emit a debug bind insn before INSN. */
4715 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4716 DEBUG_EXPR_TREE_DECL (dval), exp,
4717 VAR_INIT_STATUS_INITIALIZED);
4719 emit_debug_insn_before (bind, insn);
4720 *exp_p = dval;
4721 return;
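/* Sketch of the effect (hypothetical rtl): a subexpression found four
   levels deep, say
     (mult (reg A) (reg B))
   inside yet more arithmetic, is bound to a fresh debug temporary
     debug_insn: var_location D#1 => (mult (reg A) (reg B))
   and the enclosing location then refers to (debug_expr D#1) instead,
   capping the nesting depth.  */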
4724 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
4725 int i, j;
4726 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
4727 switch (*format_ptr++)
4729 case 'e':
4730 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
4731 break;
4733 case 'E':
4734 case 'V':
4735 for (j = 0; j < XVECLEN (exp, i); j++)
4736 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
4737 break;
4739 default:
4740 break;
4744 /* Expand the _LOCs in debug insns. We run this after expanding all
4745 regular insns, so that any variables referenced in the function
4746 will have their DECL_RTLs set. */
4748 static void
4749 expand_debug_locations (void)
4751 rtx insn;
4752 rtx last = get_last_insn ();
4753 int save_strict_alias = flag_strict_aliasing;
4755 /* New alias sets while setting up memory attributes cause
4756 -fcompare-debug failures, even though they don't bring about any
4757 codegen changes. */
4758 flag_strict_aliasing = 0;
4760 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4761 if (DEBUG_INSN_P (insn))
4763 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
4764 rtx val, prev_insn, insn2;
4765 enum machine_mode mode;
4767 if (value == NULL_TREE)
4768 val = NULL_RTX;
4769 else
4771 if (INSN_VAR_LOCATION_STATUS (insn)
4772 == VAR_INIT_STATUS_UNINITIALIZED)
4773 val = expand_debug_source_expr (value);
4774 else
4775 val = expand_debug_expr (value);
4776 gcc_assert (last == get_last_insn ());
4779 if (!val)
4780 val = gen_rtx_UNKNOWN_VAR_LOC ();
4781 else
4783 mode = GET_MODE (INSN_VAR_LOCATION (insn));
4785 gcc_assert (mode == GET_MODE (val)
4786 || (GET_MODE (val) == VOIDmode
4787 && (CONST_SCALAR_INT_P (val)
4788 || GET_CODE (val) == CONST_FIXED
4789 || GET_CODE (val) == LABEL_REF)));
4792 INSN_VAR_LOCATION_LOC (insn) = val;
4793 prev_insn = PREV_INSN (insn);
4794 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
4795 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
4798 flag_strict_aliasing = save_strict_alias;
4801 /* Expand basic block BB from GIMPLE trees to RTL. */
4803 static basic_block
4804 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
4806 gimple_stmt_iterator gsi;
4807 gimple_seq stmts;
4808 gimple stmt = NULL;
4809 rtx note, last;
4810 edge e;
4811 edge_iterator ei;
4812 void **elt;
4814 if (dump_file)
4815 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
4816 bb->index);
4818 /* Note that since we are now transitioning from GIMPLE to RTL, we
4819 cannot use the gsi_*_bb() routines because they expect the basic
4820 block to be in GIMPLE, instead of RTL. Therefore, we need to
4821 access the BB sequence directly. */
4822 stmts = bb_seq (bb);
4823 bb->il.gimple.seq = NULL;
4824 bb->il.gimple.phi_nodes = NULL;
4825 rtl_profile_for_bb (bb);
4826 init_rtl_bb_info (bb);
4827 bb->flags |= BB_RTL;
4829 /* Remove the RETURN_EXPR if we may fall through to the exit
4830 instead. */
4831 gsi = gsi_last (stmts);
4832 if (!gsi_end_p (gsi)
4833 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
4835 gimple ret_stmt = gsi_stmt (gsi);
4837 gcc_assert (single_succ_p (bb));
4838 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
4840 if (bb->next_bb == EXIT_BLOCK_PTR
4841 && !gimple_return_retval (ret_stmt))
4843 gsi_remove (&gsi, false);
4844 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
4848 gsi = gsi_start (stmts);
4849 if (!gsi_end_p (gsi))
4851 stmt = gsi_stmt (gsi);
4852 if (gimple_code (stmt) != GIMPLE_LABEL)
4853 stmt = NULL;
4856 elt = pointer_map_contains (lab_rtx_for_bb, bb);
4858 if (stmt || elt)
4860 last = get_last_insn ();
4862 if (stmt)
4864 expand_gimple_stmt (stmt);
4865 gsi_next (&gsi);
4868 if (elt)
4869 emit_label ((rtx) *elt);
4871 /* Java emits line number notes at the top of labels.
4872 ??? Make this go away once line number notes are obsoleted. */
4873 BB_HEAD (bb) = NEXT_INSN (last);
4874 if (NOTE_P (BB_HEAD (bb)))
4875 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
4876 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
4878 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4880 else
4881 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
4883 NOTE_BASIC_BLOCK (note) = bb;
4885 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4887 basic_block new_bb;
4889 stmt = gsi_stmt (gsi);
4891 /* If this statement is a non-debug one, and we generate debug
4892 insns, then this one might be the last real use of a TERed
4893 SSA_NAME while there are still some debug uses further
4894 down. Expanding the current SSA name in such later debug
4895 uses by its RHS might lead to wrong debug info, as coalescing
4896 might make the operands of such RHS be placed into the same
4897 pseudo as something else. Like so:
4898 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
4899 use(a_1);
4900 a_2 = ...
4901 #DEBUG ... => a_1
4902 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
4903 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
4904 the write to a_2 would actually have clobbered the place which
4905 formerly held a_0.
4907 So, instead of that, we recognize the situation, and generate
4908 debug temporaries at the last real use of TERed SSA names:
4909 a_1 = a_0 + 1;
4910 #DEBUG #D1 => a_1
4911 use(a_1);
4912 a_2 = ...
4913 #DEBUG ... => #D1
4915 if (MAY_HAVE_DEBUG_INSNS
4916 && SA.values
4917 && !is_gimple_debug (stmt))
4919 ssa_op_iter iter;
4920 tree op;
4921 gimple def;
4923 location_t sloc = curr_insn_location ();
4925 /* Look for SSA names that have their last use here (TERed
4926 names always have only one real use). */
4927 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4928 if ((def = get_gimple_for_ssa_name (op)))
4930 imm_use_iterator imm_iter;
4931 use_operand_p use_p;
4932 bool have_debug_uses = false;
4934 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
4936 if (gimple_debug_bind_p (USE_STMT (use_p)))
4938 have_debug_uses = true;
4939 break;
4943 if (have_debug_uses)
4945 /* OP is a TERed SSA name, with DEF its defining
4946 statement, and OP is used in further debug
4947 instructions. Generate a debug temporary, and
4948 replace all uses of OP in debug insns with that
4949 temporary. */
4950 gimple debugstmt;
4951 tree value = gimple_assign_rhs_to_tree (def);
4952 tree vexpr = make_node (DEBUG_EXPR_DECL);
4953 rtx val;
4954 enum machine_mode mode;
4956 set_curr_insn_location (gimple_location (def));
4958 DECL_ARTIFICIAL (vexpr) = 1;
4959 TREE_TYPE (vexpr) = TREE_TYPE (value);
4960 if (DECL_P (value))
4961 mode = DECL_MODE (value);
4962 else
4963 mode = TYPE_MODE (TREE_TYPE (value));
4964 DECL_MODE (vexpr) = mode;
4966 val = gen_rtx_VAR_LOCATION
4967 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4969 emit_debug_insn (val);
4971 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
4973 if (!gimple_debug_bind_p (debugstmt))
4974 continue;
4976 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
4977 SET_USE (use_p, vexpr);
4979 update_stmt (debugstmt);
4983 set_curr_insn_location (sloc);
4986 currently_expanding_gimple_stmt = stmt;
4988 /* Expand this statement, then evaluate the resulting RTL and
4989 fixup the CFG accordingly. */
4990 if (gimple_code (stmt) == GIMPLE_COND)
4992 new_bb = expand_gimple_cond (bb, stmt);
4993 if (new_bb)
4994 return new_bb;
4996 else if (gimple_debug_bind_p (stmt))
4998 location_t sloc = curr_insn_location ();
4999 gimple_stmt_iterator nsi = gsi;
5001 for (;;)
5003 tree var = gimple_debug_bind_get_var (stmt);
5004 tree value;
5005 rtx val;
5006 enum machine_mode mode;
5008 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5009 && TREE_CODE (var) != LABEL_DECL
5010 && !target_for_debug_bind (var))
5011 goto delink_debug_stmt;
5013 if (gimple_debug_bind_has_value_p (stmt))
5014 value = gimple_debug_bind_get_value (stmt);
5015 else
5016 value = NULL_TREE;
5018 last = get_last_insn ();
5020 set_curr_insn_location (gimple_location (stmt));
5022 if (DECL_P (var))
5023 mode = DECL_MODE (var);
5024 else
5025 mode = TYPE_MODE (TREE_TYPE (var));
5027 val = gen_rtx_VAR_LOCATION
5028 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5030 emit_debug_insn (val);
5032 if (dump_file && (dump_flags & TDF_DETAILS))
5034 /* We can't dump the insn with a TREE where an RTX
5035 is expected. */
5036 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5037 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5038 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5041 delink_debug_stmt:
5042 /* In order not to generate too many debug temporaries,
5043 we delink all uses of debug statements we already expanded.
5044 Therefore debug statements between definition and real
5045 use of TERed SSA names will continue to use the SSA name,
5046 and not be replaced with debug temps. */
5047 delink_stmt_imm_use (stmt);
5049 gsi = nsi;
5050 gsi_next (&nsi);
5051 if (gsi_end_p (nsi))
5052 break;
5053 stmt = gsi_stmt (nsi);
5054 if (!gimple_debug_bind_p (stmt))
5055 break;
5058 set_curr_insn_location (sloc);
5060 else if (gimple_debug_source_bind_p (stmt))
5062 location_t sloc = curr_insn_location ();
5063 tree var = gimple_debug_source_bind_get_var (stmt);
5064 tree value = gimple_debug_source_bind_get_value (stmt);
5065 rtx val;
5066 enum machine_mode mode;
5068 last = get_last_insn ();
5070 set_curr_insn_location (gimple_location (stmt));
5072 mode = DECL_MODE (var);
5074 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5075 VAR_INIT_STATUS_UNINITIALIZED);
5077 emit_debug_insn (val);
5079 if (dump_file && (dump_flags & TDF_DETAILS))
5081 /* We can't dump the insn with a TREE where an RTX
5082 is expected. */
5083 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5084 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5085 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5088 set_curr_insn_location (sloc);
5090 else
5092 if (is_gimple_call (stmt)
5093 && gimple_call_tail_p (stmt)
5094 && disable_tail_calls)
5095 gimple_call_set_tail (stmt, false);
5097 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
5099 bool can_fallthru;
5100 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
5101 if (new_bb)
5103 if (can_fallthru)
5104 bb = new_bb;
5105 else
5106 return new_bb;
5109 else
5111 def_operand_p def_p;
5112 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5114 if (def_p != NULL)
5116 /* Ignore this stmt if it is in the list of
5117 replaceable expressions. */
5118 if (SA.values
5119 && bitmap_bit_p (SA.values,
5120 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5121 continue;
5123 last = expand_gimple_stmt (stmt);
5124 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5129 currently_expanding_gimple_stmt = NULL;
5131 /* Expand implicit goto and convert goto_locus. */
5132 FOR_EACH_EDGE (e, ei, bb->succs)
5134 if (e->goto_locus != UNKNOWN_LOCATION)
5135 set_curr_insn_location (e->goto_locus);
5136 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5138 emit_jump (label_rtx_for_bb (e->dest));
5139 e->flags &= ~EDGE_FALLTHRU;
5143 /* Expanded RTL can create a jump in the last instruction of a block.
5144 This might later be assumed to be a jump to the successor and break edge insertion.
5145 We need to insert a dummy move to prevent this. PR41440. */
5146 if (single_succ_p (bb)
5147 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5148 && (last = get_last_insn ())
5149 && JUMP_P (last))
5151 rtx dummy = gen_reg_rtx (SImode);
5152 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5155 do_pending_stack_adjust ();
5157 /* Find the block tail. The last insn in the block is the insn
5158 before a barrier and/or table jump insn. */
5159 last = get_last_insn ();
5160 if (BARRIER_P (last))
5161 last = PREV_INSN (last);
5162 if (JUMP_TABLE_DATA_P (last))
5163 last = PREV_INSN (PREV_INSN (last));
5164 BB_END (bb) = last;
5166 update_bb_for_insn (bb);
5168 return bb;
5172 /* Create a basic block for initialization code. */
5174 static basic_block
5175 construct_init_block (void)
5177 basic_block init_block, first_block;
5178 edge e = NULL;
5179 int flags;
5181 /* Multiple entry points not supported yet. */
5182 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5183 init_rtl_bb_info (ENTRY_BLOCK_PTR);
5184 init_rtl_bb_info (EXIT_BLOCK_PTR);
5185 ENTRY_BLOCK_PTR->flags |= BB_RTL;
5186 EXIT_BLOCK_PTR->flags |= BB_RTL;
5188 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
5190 /* When the entry edge points to the first basic block, we don't need a jump;
5191 otherwise we have to jump to the proper target. */
5192 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
5194 tree label = gimple_block_label (e->dest);
5196 emit_jump (label_rtx (label));
5197 flags = 0;
5199 else
5200 flags = EDGE_FALLTHRU;
5202 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5203 get_last_insn (),
5204 ENTRY_BLOCK_PTR);
5205 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
5206 init_block->count = ENTRY_BLOCK_PTR->count;
5207 if (current_loops && ENTRY_BLOCK_PTR->loop_father)
5208 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR->loop_father);
5209 if (e)
5211 first_block = e->dest;
5212 redirect_edge_succ (e, init_block);
5213 e = make_edge (init_block, first_block, flags);
5215 else
5216 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
5217 e->probability = REG_BR_PROB_BASE;
5218 e->count = ENTRY_BLOCK_PTR->count;
5220 update_bb_for_insn (init_block);
5221 return init_block;
5224 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5225 found in the block tree. */
5227 static void
5228 set_block_levels (tree block, int level)
5230 while (block)
5232 BLOCK_NUMBER (block) = level;
5233 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5234 block = BLOCK_CHAIN (block);
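/* E.g. for a block tree  A { B { C } D }  this assigns A level 0,
   B and D level 1, and C level 2 (its depth in the lexical block
   tree).  */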
5238 /* Create a block containing landing pads and similar stuff. */
5240 static void
5241 construct_exit_block (void)
5243 rtx head = get_last_insn ();
5244 rtx end;
5245 basic_block exit_block;
5246 edge e, e2;
5247 unsigned ix;
5248 edge_iterator ei;
5249 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
5251 rtl_profile_for_bb (EXIT_BLOCK_PTR);
5253 /* Make sure the locus is set to the end of the function, so that
5254 epilogue line numbers and warnings are set properly. */
5255 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5256 input_location = cfun->function_end_locus;
5258 /* Generate rtl for function exit. */
5259 expand_function_end ();
5261 end = get_last_insn ();
5262 if (head == end)
5263 return;
5264 /* While emitting the function end we could move the end of the last basic block. */
5266 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
5267 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5268 head = NEXT_INSN (head);
5269 exit_block = create_basic_block (NEXT_INSN (head), end,
5270 EXIT_BLOCK_PTR->prev_bb);
5271 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
5272 exit_block->count = EXIT_BLOCK_PTR->count;
5273 if (current_loops && EXIT_BLOCK_PTR->loop_father)
5274 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR->loop_father);
5276 ix = 0;
5277 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
5279 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
5280 if (!(e->flags & EDGE_ABNORMAL))
5281 redirect_edge_succ (e, exit_block);
5282 else
5283 ix++;
5286 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
5287 e->probability = REG_BR_PROB_BASE;
5288 e->count = EXIT_BLOCK_PTR->count;
5289 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
5290 if (e2 != e)
5292 e->count -= e2->count;
5293 exit_block->count -= e2->count;
5294 exit_block->frequency -= EDGE_FREQUENCY (e2);
5296 if (e->count < 0)
5297 e->count = 0;
5298 if (exit_block->count < 0)
5299 exit_block->count = 0;
5300 if (exit_block->frequency < 0)
5301 exit_block->frequency = 0;
5302 update_bb_for_insn (exit_block);
5305 /* Helper function for discover_nonconstant_array_refs.
5306 Look for ARRAY_REF nodes with non-constant indexes and mark them
5307 addressable. */
5309 static tree
5310 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5311 void *data ATTRIBUTE_UNUSED)
5313 tree t = *tp;
5315 if (IS_TYPE_OR_DECL_P (t))
5316 *walk_subtrees = 0;
5317 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5319 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5320 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5321 && (!TREE_OPERAND (t, 2)
5322 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5323 || (TREE_CODE (t) == COMPONENT_REF
5324 && (!TREE_OPERAND (t,2)
5325 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5326 || TREE_CODE (t) == BIT_FIELD_REF
5327 || TREE_CODE (t) == REALPART_EXPR
5328 || TREE_CODE (t) == IMAGPART_EXPR
5329 || TREE_CODE (t) == VIEW_CONVERT_EXPR
5330 || CONVERT_EXPR_P (t))
5331 t = TREE_OPERAND (t, 0);
5333 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5335 t = get_base_address (t);
5336 if (t && DECL_P (t)
5337 && DECL_MODE (t) != BLKmode)
5338 TREE_ADDRESSABLE (t) = 1;
5341 *walk_subtrees = 0;
5344 return NULL_TREE;
5347 /* RTL expansion is not able to compile array references with variable
5348 offsets for arrays stored in a single register. Discover such
5349 expressions and mark the variables as addressable to avoid this
5350 scenario. */
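/* For instance (illustrative):

     int v[2];
     ...
     return v[i];

   with a non-constant i, v might otherwise be allocated to a single
   register; marking it addressable forces it into memory, where the
   variable index can be expanded.  */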
5352 static void
5353 discover_nonconstant_array_refs (void)
5355 basic_block bb;
5356 gimple_stmt_iterator gsi;
5358 FOR_EACH_BB (bb)
5359 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5361 gimple stmt = gsi_stmt (gsi);
5362 if (!is_gimple_debug (stmt))
5363 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
5367 /* This function sets crtl->args.internal_arg_pointer to a virtual
5368 register if DRAP is needed. The local register allocator will replace
5369 virtual_incoming_args_rtx with the virtual register. */
5371 static void
5372 expand_stack_alignment (void)
5374 rtx drap_rtx;
5375 unsigned int preferred_stack_boundary;
5377 if (! SUPPORTS_STACK_ALIGNMENT)
5378 return;
5380 if (cfun->calls_alloca
5381 || cfun->has_nonlocal_label
5382 || crtl->has_nonlocal_goto)
5383 crtl->need_drap = true;
5385 /* Call update_stack_boundary here again to update incoming stack
5386 boundary. It may set incoming stack alignment to a different
5387 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5388 use the minimum incoming stack alignment to check if it is OK
5389 to perform sibcall optimization since sibcall optimization will
5390 only align the outgoing stack to incoming stack boundary. */
5391 if (targetm.calls.update_stack_boundary)
5392 targetm.calls.update_stack_boundary ();
5394 /* The incoming stack frame has to be aligned at least at
5395 parm_stack_boundary. */
5396 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5398 /* Update crtl->stack_alignment_estimated and use it later to align
5399 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5400 exceptions since callgraph doesn't collect incoming stack alignment
5401 in this case. */
5402 if (cfun->can_throw_non_call_exceptions
5403 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5404 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5405 else
5406 preferred_stack_boundary = crtl->preferred_stack_boundary;
5407 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5408 crtl->stack_alignment_estimated = preferred_stack_boundary;
5409 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5410 crtl->stack_alignment_needed = preferred_stack_boundary;
5412 gcc_assert (crtl->stack_alignment_needed
5413 <= crtl->stack_alignment_estimated);
5415 crtl->stack_realign_needed
5416 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5417 crtl->stack_realign_tried = crtl->stack_realign_needed;
5419 crtl->stack_realign_processed = true;
5421 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5422 alignment. */
5423 gcc_assert (targetm.calls.get_drap_rtx != NULL);
5424 drap_rtx = targetm.calls.get_drap_rtx ();
5426 /* stack_realign_drap and drap_rtx must match. */
5427 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5429 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5430 if (NULL != drap_rtx)
5432 crtl->args.internal_arg_pointer = drap_rtx;
5434 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5435 needed. */
5436 fixup_tail_calls ();
5441 static void
5442 expand_main_function (void)
5444 #if (defined(INVOKE__main) \
5445 || (!defined(HAS_INIT_SECTION) \
5446 && !defined(INIT_SECTION_ASM_OP) \
5447 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5448 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5449 #endif
5453 /* Expand code to initialize the stack_protect_guard. This is invoked at
5454 the beginning of a function to be protected. */
5456 #ifndef HAVE_stack_protect_set
5457 # define HAVE_stack_protect_set 0
5458 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5459 #endif
5461 static void
5462 stack_protect_prologue (void)
5464 tree guard_decl = targetm.stack_protect_guard ();
5465 rtx x, y;
5467 x = expand_normal (crtl->stack_protect_guard);
5468 y = expand_normal (guard_decl);
5470 /* Allow the target to copy from Y to X without leaking Y into a
5471 register. */
5472 if (HAVE_stack_protect_set)
5474 rtx insn = gen_stack_protect_set (x, y);
5475 if (insn)
5477 emit_insn (insn);
5478 return;
5482 /* Otherwise do a straight move. */
5483 emit_move_insn (x, y);
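/* On targets without the stack_protect_set pattern this degenerates to
   a plain copy of the guard, conceptually
     (set (mem:P <guard slot>) (mem:P <__stack_chk_guard>))
   (illustrative only); the named pattern, when available, performs the
   copy without leaving the guard value live in a register.  */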
5486 /* Translate the intermediate representation contained in the CFG
5487 from GIMPLE trees to RTL.
5489 We do conversion per basic block and preserve/update the tree CFG.
5490 This implies we have to do some magic as the CFG can simultaneously
5491 consist of basic blocks containing RTL and GIMPLE trees. This can
5492 confuse the CFG hooks, so be careful to not manipulate CFG during
5493 the expansion. */
5495 static unsigned int
5496 gimple_expand_cfg (void)
5498 basic_block bb, init_block;
5499 sbitmap blocks;
5500 edge_iterator ei;
5501 edge e;
5502 rtx var_seq, var_ret_seq;
5503 unsigned i;
5505 timevar_push (TV_OUT_OF_SSA);
5506 rewrite_out_of_ssa (&SA);
5507 timevar_pop (TV_OUT_OF_SSA);
5508 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
5510 /* Make sure all values used by the optimization passes have sane
5511 defaults. */
5512 reg_renumber = 0;
5514 /* Some backends want to know that we are expanding to RTL. */
5515 currently_expanding_to_rtl = 1;
5516 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5517 free_dominance_info (CDI_DOMINATORS);
5519 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
5521 insn_locations_init ();
5522 if (!DECL_IS_BUILTIN (current_function_decl))
5524 /* Eventually, all FEs should explicitly set function_start_locus. */
5525 if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
5526 set_curr_insn_location
5527 (DECL_SOURCE_LOCATION (current_function_decl));
5528 else
5529 set_curr_insn_location (cfun->function_start_locus);
5531 else
5532 set_curr_insn_location (UNKNOWN_LOCATION);
5533 prologue_location = curr_insn_location ();
5535 #ifdef INSN_SCHEDULING
5536 init_sched_attrs ();
5537 #endif
5539 /* Make sure the first insn is a note even if we don't want linenums.
5540 This makes sure the first insn will never be deleted.
5541 Also, final expects a note to appear there. */
5542 emit_note (NOTE_INSN_DELETED);
5544 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5545 discover_nonconstant_array_refs ();
5547 targetm.expand_to_rtl_hook ();
5548 crtl->stack_alignment_needed = STACK_BOUNDARY;
5549 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
5550 crtl->stack_alignment_estimated = 0;
5551 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5552 cfun->cfg->max_jumptable_ents = 0;
5554 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
5555 of the function section at expansion time to predict the distance of calls. */
5556 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5558 /* Expand the variables recorded during gimple lowering. */
5559 timevar_push (TV_VAR_EXPAND);
5560 start_sequence ();
5562 var_ret_seq = expand_used_vars ();
5564 var_seq = get_insns ();
5565 end_sequence ();
5566 timevar_pop (TV_VAR_EXPAND);
5568 /* Honor stack protection warnings. */
5569 if (warn_stack_protect)
5571 if (cfun->calls_alloca)
5572 warning (OPT_Wstack_protector,
5573 "stack protector not protecting local variables: "
5574 "variable length buffer");
5575 if (has_short_buffer && !crtl->stack_protect_guard)
5576 warning (OPT_Wstack_protector,
5577 "stack protector not protecting function: "
5578 "all local arrays are less than %d bytes long",
5579 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
5582 /* Set up parameters and prepare for return, for the function. */
5583 expand_function_start (current_function_decl);
5585 /* If we emitted any instructions for setting up the variables,
5586 emit them before the FUNCTION_START note. */
5587 if (var_seq)
5589 emit_insn_before (var_seq, parm_birth_insn);
5591 /* In expand_function_end we'll insert the alloca save/restore
5592 before parm_birth_insn. We've just inserted an alloca call.
5593 Adjust the pointer to match. */
5594 parm_birth_insn = var_seq;
5597 /* Now that we also have the parameter RTXs, copy them over to our
5598 partitions. */
5599 for (i = 0; i < SA.map->num_partitions; i++)
5601 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
5603 if (TREE_CODE (var) != VAR_DECL
5604 && !SA.partition_to_pseudo[i])
5605 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
5606 gcc_assert (SA.partition_to_pseudo[i]);
5608 /* If this decl was marked as living in multiple places, reset
5609 this now to NULL. */
5610 if (DECL_RTL_IF_SET (var) == pc_rtx)
5611 SET_DECL_RTL (var, NULL);
5613 /* Some RTL parts really want to look at DECL_RTL(x) when x
5614 was a decl marked in REG_ATTR or MEM_ATTR. We could use
5615 SET_DECL_RTL here making this available, but that would mean
5616 selecting one of the potentially many RTLs for one DECL. Instead
5617 of doing that we simply reset the MEM_EXPR of the RTL in question,
5618 then nobody can get at it and hence nobody can call DECL_RTL on it. */
5619 if (!DECL_RTL_SET_P (var))
5621 if (MEM_P (SA.partition_to_pseudo[i]))
5622 set_mem_expr (SA.partition_to_pseudo[i], NULL);
5626 /* If we have a class containing differently aligned pointers
5627 we need to merge those into the corresponding RTL pointer
5628 alignment. */
5629 for (i = 1; i < num_ssa_names; i++)
5631 tree name = ssa_name (i);
5632 int part;
5633 rtx r;
5635 if (!name
5636 /* We might have generated new SSA names in
5637 update_alias_info_with_stack_vars. They will have a NULL
5638 defining statement, and won't be part of the partitioning,
5639 so ignore them. */
5640 || !SSA_NAME_DEF_STMT (name))
5641 continue;
5642 part = var_to_partition (SA.map, name);
5643 if (part == NO_PARTITION)
5644 continue;
5646 /* Adjust all partition members to get the underlying decl of
5647 the representative which we might have created in expand_one_var. */
5648 if (SSA_NAME_VAR (name) == NULL_TREE)
5650 tree leader = partition_to_var (SA.map, part);
5651 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
5652 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
5654 if (!POINTER_TYPE_P (TREE_TYPE (name)))
5655 continue;
5657 r = SA.partition_to_pseudo[part];
5658 if (REG_P (r))
5659 mark_reg_pointer (r, get_pointer_alignment (name));
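/* E.g. (a sketch of the merging done via mark_reg_pointer): if p_1,
   known to be 8-byte aligned, and p_2, known to be only 4-byte aligned,
   share a partition, the shared pseudo ends up marked REG_POINTER with
   the weaker 32-bit alignment recorded.  */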
5662 /* If this function is `main', emit a call to `__main'
5663 to run global initializers, etc. */
5664 if (DECL_NAME (current_function_decl)
5665 && MAIN_NAME_P (DECL_NAME (current_function_decl))
5666 && DECL_FILE_SCOPE_P (current_function_decl))
5667 expand_main_function ();
5669 /* Initialize the stack_protect_guard field. This must happen after the
5670 call to __main (if any) so that the external decl is initialized. */
5671 if (crtl->stack_protect_guard)
5672 stack_protect_prologue ();
5674 expand_phi_nodes (&SA);
5676 /* Register rtl specific functions for cfg. */
5677 rtl_register_cfg_hooks ();
5679 init_block = construct_init_block ();
5681 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
5682 remaining edges later. */
5683 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
5684 e->flags &= ~EDGE_EXECUTABLE;
5686 lab_rtx_for_bb = pointer_map_create ();
5687 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
5688 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
5690 if (MAY_HAVE_DEBUG_INSNS)
5691 expand_debug_locations ();
5693 /* Free stuff we no longer need after GIMPLE optimizations. */
5694 free_dominance_info (CDI_DOMINATORS);
5695 free_dominance_info (CDI_POST_DOMINATORS);
5696 delete_tree_cfg_annotations ();
5698 timevar_push (TV_OUT_OF_SSA);
5699 finish_out_of_ssa (&SA);
5700 timevar_pop (TV_OUT_OF_SSA);
5702 timevar_push (TV_POST_EXPAND);
5703 /* We are no longer in SSA form. */
5704 cfun->gimple_df->in_ssa_p = false;
5705 if (current_loops)
5706 loops_state_clear (LOOP_CLOSED_SSA);
5708 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
5709 conservatively to true until they are all profile aware. */
5710 pointer_map_destroy (lab_rtx_for_bb);
5711 free_histograms ();
5713 construct_exit_block ();
5714 insn_locations_finalize ();
5716 if (var_ret_seq)
5718 rtx after = return_label;
5719 rtx next = NEXT_INSN (after);
5720 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
5721 after = next;
5722 emit_insn_after (var_ret_seq, after);
5725 /* Zap the tree EH table. */
5726 set_eh_throw_stmt_table (cfun, NULL);
5728 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
5729 split edges which edge insertions might do. */
5730 rebuild_jump_labels (get_insns ());
5732 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
5734 edge e;
5735 edge_iterator ei;
5736 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5738 if (e->insns.r)
5740 rebuild_jump_labels_chain (e->insns.r);
5741 /* Put insns after parm birth, but before
5742 NOTE_INSN_FUNCTION_BEG. */
5743 if (e->src == ENTRY_BLOCK_PTR
5744 && single_succ_p (ENTRY_BLOCK_PTR))
5746 rtx insns = e->insns.r;
5747 e->insns.r = NULL_RTX;
5748 if (NOTE_P (parm_birth_insn)
5749 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
5750 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
5751 else
5752 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
5754 else
5755 commit_one_edge_insertion (e);
5757 else
5758 ei_next (&ei);
5762 /* We're done expanding trees to RTL. */
5763 currently_expanding_to_rtl = 0;
5765 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
5767 edge e;
5768 edge_iterator ei;
5769 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5771 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
5772 e->flags &= ~EDGE_EXECUTABLE;
5774 /* At the moment not all abnormal edges match the RTL
5775 representation. It is safe to remove them here as
5776 find_many_sub_basic_blocks will rediscover them.
5777 In the future we should get this fixed properly. */
5778 if ((e->flags & EDGE_ABNORMAL)
5779 && !(e->flags & EDGE_SIBCALL))
5780 remove_edge (e);
5781 else
5782 ei_next (&ei);
5786 blocks = sbitmap_alloc (last_basic_block);
5787 bitmap_ones (blocks);
5788 find_many_sub_basic_blocks (blocks);
5789 sbitmap_free (blocks);
5790 purge_all_dead_edges ();
5792 expand_stack_alignment ();
5794 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
5795 function. */
5796 if (crtl->tail_call_emit)
5797 fixup_tail_calls ();
5799 /* After initial rtl generation, call back to finish generating
5800 exception support code. We need to do this before cleaning up
5801 the CFG as the code does not expect dead landing pads. */
5802 if (cfun->eh->region_tree != NULL)
5803 finish_eh_generation ();
5805 /* Remove unreachable blocks, otherwise we cannot compute dominators
5806 which are needed for loop state verification. As a side-effect
5807 this also compacts blocks.
5808 ??? We cannot remove trivially dead insns here as for example
5809 the DRAP reg on i?86 is not magically live at this point.
5810 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
5811 cleanup_cfg (CLEANUP_NO_INSN_DEL);
5813 #ifdef ENABLE_CHECKING
5814 verify_flow_info ();
5815 #endif
5817 /* Initialize pseudos allocated for hard registers. */
5818 emit_initial_value_sets ();
5820 /* And finally unshare all RTL. */
5821 unshare_all_rtl ();
5823 /* There's no need to defer outputting this function any more; we
5824 know we want to output it. */
5825 DECL_DEFER_OUTPUT (current_function_decl) = 0;
5827 /* Now that we're done expanding trees to RTL, we shouldn't have any
5828 more CONCATs anywhere. */
5829 generating_concat_p = 0;
5831 if (dump_file)
5833 fprintf (dump_file,
5834 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
5835 /* And the pass manager will dump RTL for us. */
5838 /* If we're emitting a nested function, make sure its parent gets
5839 emitted as well. Doing otherwise confuses debug info. */
5841 tree parent;
5842 for (parent = DECL_CONTEXT (current_function_decl);
5843 parent != NULL_TREE;
5844 parent = get_containing_scope (parent))
5845 if (TREE_CODE (parent) == FUNCTION_DECL)
5846 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
5849 /* We are now committed to emitting code for this function. Do any
5850 preparation, such as emitting abstract debug info for the inline
5851 before it gets mangled by optimization. */
5852 if (cgraph_function_possibly_inlined_p (current_function_decl))
5853 (*debug_hooks->outlining_inline_function) (current_function_decl);
5855 TREE_ASM_WRITTEN (current_function_decl) = 1;
5857 /* After expanding, the return labels are no longer needed. */
5858 return_label = NULL;
5859 naked_return_label = NULL;
5861 /* After expanding, the tm_restart map is no longer needed. */
5862 if (cfun->gimple_df->tm_restart)
5864 htab_delete (cfun->gimple_df->tm_restart);
5865 cfun->gimple_df->tm_restart = NULL;
5868 /* Tag the blocks with a depth number so that change_scope can find
5869 the common parent easily. */
5870 set_block_levels (DECL_INITIAL (cfun->decl), 0);
5871 default_rtl_profile ();
5873 timevar_pop (TV_POST_EXPAND);
5875 return 0;
5878 namespace {
5880 const pass_data pass_data_expand =
5882 RTL_PASS, /* type */
5883 "expand", /* name */
5884 OPTGROUP_NONE, /* optinfo_flags */
5885 false, /* has_gate */
5886 true, /* has_execute */
5887 TV_EXPAND, /* tv_id */
5888 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5889 | PROP_gimple_lcx
5890 | PROP_gimple_lvec ), /* properties_required */
5891 PROP_rtl, /* properties_provided */
5892 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5893 ( TODO_verify_ssa | TODO_verify_flow
5894 | TODO_verify_stmts ), /* todo_flags_start */
5895 0, /* todo_flags_finish */
5898 class pass_expand : public rtl_opt_pass
5900 public:
5901 pass_expand (gcc::context *ctxt)
5902 : rtl_opt_pass (pass_data_expand, ctxt)
5905 /* opt_pass methods: */
5906 unsigned int execute () { return gimple_expand_cfg (); }
5908 }; // class pass_expand
5910 } // anon namespace
5912 rtl_opt_pass *
5913 make_pass_expand (gcc::context *ctxt)
5915 return new pass_expand (ctxt);