[official-gcc.git] / gcc / cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "cfghooks.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "rtl.h"
28 #include "ssa.h"
29 #include "alias.h"
30 #include "fold-const.h"
31 #include "varasm.h"
32 #include "stor-layout.h"
33 #include "stmt.h"
34 #include "print-tree.h"
35 #include "tm_p.h"
36 #include "cfgrtl.h"
37 #include "cfganal.h"
38 #include "cfgbuild.h"
39 #include "cfgcleanup.h"
40 #include "insn-codes.h"
41 #include "optabs.h"
42 #include "flags.h"
43 #include "insn-config.h"
44 #include "expmed.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "emit-rtl.h"
49 #include "expr.h"
50 #include "langhooks.h"
51 #include "internal-fn.h"
52 #include "tree-eh.h"
53 #include "gimple-iterator.h"
54 #include "gimple-walk.h"
55 #include "cgraph.h"
56 #include "tree-cfg.h"
57 #include "tree-dfa.h"
58 #include "tree-ssa.h"
59 #include "tree-pass.h"
60 #include "except.h"
61 #include "diagnostic.h"
62 #include "gimple-pretty-print.h"
63 #include "toplev.h"
64 #include "debug.h"
65 #include "params.h"
66 #include "tree-inline.h"
67 #include "value-prof.h"
68 #include "target.h"
69 #include "tree-ssa-live.h"
70 #include "tree-outof-ssa.h"
71 #include "cfgloop.h"
72 #include "regs.h" /* For reg_renumber. */
73 #include "insn-attr.h" /* For INSN_SCHEDULING. */
74 #include "asan.h"
75 #include "tree-ssa-address.h"
76 #include "recog.h"
77 #include "output.h"
78 #include "builtins.h"
79 #include "tree-chkp.h"
80 #include "rtl-chkp.h"
82 /* Some systems use __main in a way incompatible with its use in gcc, in these
83 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
84 give the same symbol without quotes for an alternative entry point. You
85 must define both, or neither. */
86 #ifndef NAME__MAIN
87 #define NAME__MAIN "__main"
88 #endif
90 /* This variable holds information helping the rewriting of SSA trees
91 into RTL. */
92 struct ssaexpand SA;
94 /* This variable holds the currently expanded gimple statement for purposes
 95 of communicating the profile info to the builtin expanders. */
96 gimple currently_expanding_gimple_stmt;
98 static rtx expand_debug_expr (tree);
100 /* Return an expression tree corresponding to the RHS of GIMPLE
101 statement STMT. */
103 tree
104 gimple_assign_rhs_to_tree (gimple stmt)
106 tree t;
107 enum gimple_rhs_class grhs_class;
109 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
111 if (grhs_class == GIMPLE_TERNARY_RHS)
112 t = build3 (gimple_assign_rhs_code (stmt),
113 TREE_TYPE (gimple_assign_lhs (stmt)),
114 gimple_assign_rhs1 (stmt),
115 gimple_assign_rhs2 (stmt),
116 gimple_assign_rhs3 (stmt));
117 else if (grhs_class == GIMPLE_BINARY_RHS)
118 t = build2 (gimple_assign_rhs_code (stmt),
119 TREE_TYPE (gimple_assign_lhs (stmt)),
120 gimple_assign_rhs1 (stmt),
121 gimple_assign_rhs2 (stmt));
122 else if (grhs_class == GIMPLE_UNARY_RHS)
123 t = build1 (gimple_assign_rhs_code (stmt),
124 TREE_TYPE (gimple_assign_lhs (stmt)),
125 gimple_assign_rhs1 (stmt));
126 else if (grhs_class == GIMPLE_SINGLE_RHS)
128 t = gimple_assign_rhs1 (stmt);
129 /* Avoid modifying this tree in place below. */
130 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
131 && gimple_location (stmt) != EXPR_LOCATION (t))
132 || (gimple_block (stmt)
133 && currently_expanding_to_rtl
134 && EXPR_P (t)))
135 t = copy_node (t);
137 else
138 gcc_unreachable ();
140 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
141 SET_EXPR_LOCATION (t, gimple_location (stmt));
143 return t;
147 #ifndef STACK_ALIGNMENT_NEEDED
148 #define STACK_ALIGNMENT_NEEDED 1
149 #endif
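/* If X is an SSA_NAME, refer to its underlying variable; otherwise X itself. */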
151 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
153 /* Choose either CUR or NEXT as the leader DECL for a partition.
154 Prefer ignored decls, to simplify debug dumps and reduce ambiguity
 155 arising from the same user variable being in multiple partitions (this is
156 less likely for compiler-introduced temps). */
158 static tree
159 leader_merge (tree cur, tree next)
161 if (cur == NULL || cur == next)
162 return next;
164 if (DECL_P (cur) && DECL_IGNORED_P (cur))
165 return cur;
167 if (DECL_P (next) && DECL_IGNORED_P (next))
168 return next;
170 return cur;
174 /* Return the RTL for the default SSA def of a PARM or RESULT, if
175 there is one. */
 177 rtx
 178 get_rtl_for_parm_ssa_default_def (tree var)
180 gcc_assert (TREE_CODE (var) == PARM_DECL || TREE_CODE (var) == RESULT_DECL);
182 if (!is_gimple_reg (var))
183 return NULL_RTX;
185 /* If we've already determined RTL for the decl, use it. This is
186 not just an optimization: if VAR is a PARM whose incoming value
187 is unused, we won't find a default def to use its partition, but
188 we still want to use the location of the parm, if it was used at
189 all. During assign_parms, until a location is assigned for the
 190 VAR, RTL can only be set for a parm or result if we're not coalescing
191 across variables, when we know we're coalescing all SSA_NAMEs of
192 each parm or result, and we're not coalescing them with names
193 pertaining to other variables, such as other parms' default
194 defs. */
195 if (DECL_RTL_SET_P (var))
197 gcc_assert (DECL_RTL (var) != pc_rtx);
198 return DECL_RTL (var);
201 tree name = ssa_default_def (cfun, var);
203 if (!name)
204 return NULL_RTX;
206 int part = var_to_partition (SA.map, name);
207 if (part == NO_PARTITION)
208 return NULL_RTX;
210 return SA.partition_to_pseudo[part];
 213 /* Associate declaration T with storage space X. If T is not an
214 SSA name this is exactly SET_DECL_RTL, otherwise make the
215 partition of T associated with X. */
216 static inline void
217 set_rtl (tree t, rtx x)
219 if (x && SSAVAR (t))
221 bool skip = false;
222 tree cur = NULL_TREE;
224 if (MEM_P (x))
225 cur = MEM_EXPR (x);
226 else if (REG_P (x))
227 cur = REG_EXPR (x);
228 else if (GET_CODE (x) == CONCAT
229 && REG_P (XEXP (x, 0)))
230 cur = REG_EXPR (XEXP (x, 0));
231 else if (GET_CODE (x) == PARALLEL)
232 cur = REG_EXPR (XVECEXP (x, 0, 0));
233 else if (x == pc_rtx)
234 skip = true;
235 else
236 gcc_unreachable ();
238 tree next = skip ? cur : leader_merge (cur, SSAVAR (t));
240 if (cur != next)
242 if (MEM_P (x))
243 set_mem_attributes (x, next, true);
244 else
245 set_reg_attrs_for_decl_rtl (next, x);
249 if (TREE_CODE (t) == SSA_NAME)
251 int part = var_to_partition (SA.map, t);
252 if (part != NO_PARTITION)
254 if (SA.partition_to_pseudo[part])
255 gcc_assert (SA.partition_to_pseudo[part] == x);
256 else
257 SA.partition_to_pseudo[part] = x;
259 /* For the benefit of debug information at -O0 (where
260 vartracking doesn't run) record the place also in the base
261 DECL. For PARMs and RESULTs, we may end up resetting these
262 in function.c:maybe_reset_rtl_for_parm, but in some rare
263 cases we may need them (unused and overwritten incoming
264 value, that at -O0 must share the location with the other
265 uses in spite of the missing default def), and this may be
266 the only chance to preserve them. */
267 if (x && x != pc_rtx && SSA_NAME_VAR (t))
269 tree var = SSA_NAME_VAR (t);
270 /* If we don't yet have something recorded, just record it now. */
271 if (!DECL_RTL_SET_P (var))
272 SET_DECL_RTL (var, x);
273 /* If we have it set already to "multiple places" don't
274 change this. */
 275 else if (DECL_RTL (var) == pc_rtx)
 276   ;
277 /* If we have something recorded and it's not the same place
278 as we want to record now, we have multiple partitions for the
279 same base variable, with different places. We can't just
 280 randomly choose one, hence we have to say that we don't know.
281 This only happens with optimization, and there var-tracking
282 will figure out the right thing. */
283 else if (DECL_RTL (var) != x)
284 SET_DECL_RTL (var, pc_rtx);
287 else
288 SET_DECL_RTL (t, x);
291 /* This structure holds data relevant to one variable that will be
292 placed in a stack slot. */
293 struct stack_var
295 /* The Variable. */
296 tree decl;
298 /* Initially, the size of the variable. Later, the size of the partition,
 299 if this variable becomes its partition's representative. */
300 HOST_WIDE_INT size;
 302 /* The *byte* alignment required for this variable. Or, as with the
303 size, the alignment for this partition. */
304 unsigned int alignb;
306 /* The partition representative. */
307 size_t representative;
309 /* The next stack variable in the partition, or EOC. */
310 size_t next;
312 /* The numbers of conflicting stack variables. */
313 bitmap conflicts;
316 #define EOC ((size_t)-1)
318 /* We have an array of such objects while deciding allocation. */
319 static struct stack_var *stack_vars;
320 static size_t stack_vars_alloc;
321 static size_t stack_vars_num;
322 static hash_map<tree, size_t> *decl_to_stack_part;
324 /* Conflict bitmaps go on this obstack. This allows us to destroy
325 all of them in one big sweep. */
326 static bitmap_obstack stack_var_bitmap_obstack;
328 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
329 is non-decreasing. */
330 static size_t *stack_vars_sorted;
332 /* The phase of the stack frame. This is the known misalignment of
333 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
334 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
335 static int frame_phase;
337 /* Used during expand_used_vars to remember if we saw any decls for
338 which we'd like to enable stack smashing protection. */
339 static bool has_protected_decls;
 341 /* Used during expand_used_vars. Remember if we saw a character buffer
342 smaller than our cutoff threshold. Used for -Wstack-protector. */
343 static bool has_short_buffer;
345 /* Compute the byte alignment to use for DECL. Ignore alignment
 346 we can't satisfy given the expected alignment of the stack boundary. */
348 static unsigned int
349 align_local_variable (tree decl)
351 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
352 DECL_ALIGN (decl) = align;
353 return align / BITS_PER_UNIT;
 356 /* Align given offset BASE with ALIGN. Round up if ALIGN_UP is true,
 357 round down otherwise. Return the aligned BASE value. */
359 static inline unsigned HOST_WIDE_INT
360 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
362 return align_up ? (base + align - 1) & -align : base & -align;
365 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
366 Return the frame offset. */
368 static HOST_WIDE_INT
369 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
371 HOST_WIDE_INT offset, new_frame_offset;
373 if (FRAME_GROWS_DOWNWARD)
375 new_frame_offset
376 = align_base (frame_offset - frame_phase - size,
377 align, false) + frame_phase;
378 offset = new_frame_offset;
380 else
382 new_frame_offset
383 = align_base (frame_offset - frame_phase, align, true) + frame_phase;
384 offset = new_frame_offset;
385 new_frame_offset += size;
387 frame_offset = new_frame_offset;
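/* frame_offset_overflow diagnoses a frame that has grown too large for the
   target; reset the offsets so expansion can continue after the error. */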
389 if (frame_offset_overflow (frame_offset, cfun->decl))
390 frame_offset = offset = 0;
392 return offset;
395 /* Accumulate DECL into STACK_VARS. */
397 static void
398 add_stack_var (tree decl)
400 struct stack_var *v;
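/* Grow the stack_vars array as needed: by half again its current size,
   starting from 32 entries. */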
402 if (stack_vars_num >= stack_vars_alloc)
404 if (stack_vars_alloc)
405 stack_vars_alloc = stack_vars_alloc * 3 / 2;
406 else
407 stack_vars_alloc = 32;
408 stack_vars
409 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
411 if (!decl_to_stack_part)
412 decl_to_stack_part = new hash_map<tree, size_t>;
414 v = &stack_vars[stack_vars_num];
415 decl_to_stack_part->put (decl, stack_vars_num);
417 v->decl = decl;
418 v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
419 /* Ensure that all variables have size, so that &a != &b for any two
420 variables that are simultaneously live. */
421 if (v->size == 0)
422 v->size = 1;
423 v->alignb = align_local_variable (SSAVAR (decl));
424 /* An alignment of zero can mightily confuse us later. */
425 gcc_assert (v->alignb != 0);
427 /* All variables are initially in their own partition. */
428 v->representative = stack_vars_num;
429 v->next = EOC;
431 /* All variables initially conflict with no other. */
432 v->conflicts = NULL;
434 /* Ensure that this decl doesn't get put onto the list twice. */
435 set_rtl (decl, pc_rtx);
437 stack_vars_num++;
440 /* Make the decls associated with luid's X and Y conflict. */
442 static void
443 add_stack_var_conflict (size_t x, size_t y)
445 struct stack_var *a = &stack_vars[x];
446 struct stack_var *b = &stack_vars[y];
447 if (!a->conflicts)
448 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
449 if (!b->conflicts)
450 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
451 bitmap_set_bit (a->conflicts, y);
452 bitmap_set_bit (b->conflicts, x);
455 /* Check whether the decls associated with luid's X and Y conflict. */
457 static bool
458 stack_var_conflict_p (size_t x, size_t y)
460 struct stack_var *a = &stack_vars[x];
461 struct stack_var *b = &stack_vars[y];
462 if (x == y)
463 return false;
464 /* Partitions containing an SSA name result from gimple registers
465 with things like unsupported modes. They are top-level and
466 hence conflict with everything else. */
467 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
468 return true;
470 if (!a->conflicts || !b->conflicts)
471 return false;
472 return bitmap_bit_p (a->conflicts, y);
475 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
476 enter its partition number into bitmap DATA. */
478 static bool
479 visit_op (gimple, tree op, tree, void *data)
481 bitmap active = (bitmap)data;
482 op = get_base_address (op);
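/* Only decls still carrying the pc_rtx marker installed by add_stack_var,
   i.e. stack variables awaiting a slot, are of interest here. */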
483 if (op
484 && DECL_P (op)
485 && DECL_RTL_IF_SET (op) == pc_rtx)
487 size_t *v = decl_to_stack_part->get (op);
488 if (v)
489 bitmap_set_bit (active, *v);
491 return false;
494 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
495 record conflicts between it and all currently active other partitions
496 from bitmap DATA. */
498 static bool
499 visit_conflict (gimple, tree op, tree, void *data)
501 bitmap active = (bitmap)data;
502 op = get_base_address (op);
503 if (op
504 && DECL_P (op)
505 && DECL_RTL_IF_SET (op) == pc_rtx)
507 size_t *v = decl_to_stack_part->get (op);
508 if (v && bitmap_set_bit (active, *v))
510 size_t num = *v;
511 bitmap_iterator bi;
512 unsigned i;
513 gcc_assert (num < stack_vars_num);
514 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
515 add_stack_var_conflict (num, i);
518 return false;
521 /* Helper routine for add_scope_conflicts, calculating the active partitions
522 at the end of BB, leaving the result in WORK. We're called to generate
523 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
524 liveness. */
526 static void
527 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
529 edge e;
530 edge_iterator ei;
531 gimple_stmt_iterator gsi;
532 walk_stmt_load_store_addr_fn visit;
534 bitmap_clear (work);
535 FOR_EACH_EDGE (e, ei, bb->preds)
536 bitmap_ior_into (work, (bitmap)e->src->aux);
538 visit = visit_op;
540 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
542 gimple stmt = gsi_stmt (gsi);
543 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
545 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
547 gimple stmt = gsi_stmt (gsi);
549 if (gimple_clobber_p (stmt))
551 tree lhs = gimple_assign_lhs (stmt);
552 size_t *v;
553 /* Nested function lowering might introduce LHSs
554 that are COMPONENT_REFs. */
555 if (TREE_CODE (lhs) != VAR_DECL)
556 continue;
557 if (DECL_RTL_IF_SET (lhs) == pc_rtx
558 && (v = decl_to_stack_part->get (lhs)))
559 bitmap_clear_bit (work, *v);
561 else if (!is_gimple_debug (stmt))
563 if (for_conflict
564 && visit == visit_op)
566 /* If this is the first real instruction in this BB we need
567 to add conflicts for everything live at this point now.
568 Unlike classical liveness for named objects we can't
569 rely on seeing a def/use of the names we're interested in.
570 There might merely be indirect loads/stores. We'd not add any
571 conflicts for such partitions. */
572 bitmap_iterator bi;
573 unsigned i;
574 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
576 struct stack_var *a = &stack_vars[i];
577 if (!a->conflicts)
578 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
579 bitmap_ior_into (a->conflicts, work);
581 visit = visit_conflict;
583 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
588 /* Generate stack partition conflicts between all partitions that are
589 simultaneously live. */
591 static void
592 add_scope_conflicts (void)
594 basic_block bb;
595 bool changed;
596 bitmap work = BITMAP_ALLOC (NULL);
597 int *rpo;
598 int n_bbs;
600 /* We approximate the live range of a stack variable by taking the first
601 mention of its name as starting point(s), and by the end-of-scope
602 death clobber added by gimplify as ending point(s) of the range.
 603 This overapproximates in the case where we, for instance, moved an address-taken
604 operation upward, without also moving a dereference to it upwards.
 605 But it's conservatively correct as a variable can never hold values
606 before its name is mentioned at least once.
608 We then do a mostly classical bitmap liveness algorithm. */
610 FOR_ALL_BB_FN (bb, cfun)
611 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
613 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
614 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
616 changed = true;
617 while (changed)
619 int i;
620 changed = false;
621 for (i = 0; i < n_bbs; i++)
623 bitmap active;
624 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
625 active = (bitmap)bb->aux;
626 add_scope_conflicts_1 (bb, work, false);
627 if (bitmap_ior_into (active, work))
628 changed = true;
632 FOR_EACH_BB_FN (bb, cfun)
633 add_scope_conflicts_1 (bb, work, true);
635 free (rpo);
636 BITMAP_FREE (work);
637 FOR_ALL_BB_FN (bb, cfun)
638 BITMAP_FREE (bb->aux);
641 /* A subroutine of partition_stack_vars. A comparison function for qsort,
642 sorting an array of indices by the properties of the object. */
644 static int
645 stack_var_cmp (const void *a, const void *b)
647 size_t ia = *(const size_t *)a;
648 size_t ib = *(const size_t *)b;
649 unsigned int aligna = stack_vars[ia].alignb;
650 unsigned int alignb = stack_vars[ib].alignb;
651 HOST_WIDE_INT sizea = stack_vars[ia].size;
652 HOST_WIDE_INT sizeb = stack_vars[ib].size;
653 tree decla = stack_vars[ia].decl;
654 tree declb = stack_vars[ib].decl;
655 bool largea, largeb;
656 unsigned int uida, uidb;
658 /* Primary compare on "large" alignment. Large comes first. */
659 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
660 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
661 if (largea != largeb)
662 return (int)largeb - (int)largea;
664 /* Secondary compare on size, decreasing */
665 if (sizea > sizeb)
666 return -1;
667 if (sizea < sizeb)
668 return 1;
670 /* Tertiary compare on true alignment, decreasing. */
671 if (aligna < alignb)
672 return -1;
673 if (aligna > alignb)
674 return 1;
676 /* Final compare on ID for sort stability, increasing.
677 Two SSA names are compared by their version, SSA names come before
678 non-SSA names, and two normal decls are compared by their DECL_UID. */
679 if (TREE_CODE (decla) == SSA_NAME)
681 if (TREE_CODE (declb) == SSA_NAME)
682 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
683 else
684 return -1;
686 else if (TREE_CODE (declb) == SSA_NAME)
687 return 1;
688 else
689 uida = DECL_UID (decla), uidb = DECL_UID (declb);
690 if (uida < uidb)
691 return 1;
692 if (uida > uidb)
693 return -1;
694 return 0;
697 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
698 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
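/* Maps the DECL_PT_UID of a partitioned decl to the bitmap holding the
   DECL_PT_UIDs of all members of its partition. */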
700 /* If the points-to solution *PI points to variables that are in a partition
701 together with other variables add all partition members to the pointed-to
702 variables bitmap. */
704 static void
705 add_partitioned_vars_to_ptset (struct pt_solution *pt,
706 part_hashmap *decls_to_partitions,
707 hash_set<bitmap> *visited, bitmap temp)
709 bitmap_iterator bi;
710 unsigned i;
711 bitmap *part;
713 if (pt->anything
714 || pt->vars == NULL
715 /* The pointed-to vars bitmap is shared, it is enough to
716 visit it once. */
717 || visited->add (pt->vars))
718 return;
720 bitmap_clear (temp);
722 /* By using a temporary bitmap to store all members of the partitions
723 we have to add we make sure to visit each of the partitions only
724 once. */
725 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
726 if ((!temp
727 || !bitmap_bit_p (temp, i))
728 && (part = decls_to_partitions->get (i)))
729 bitmap_ior_into (temp, *part);
730 if (!bitmap_empty_p (temp))
731 bitmap_ior_into (pt->vars, temp);
734 /* Update points-to sets based on partition info, so we can use them on RTL.
735 The bitmaps representing stack partitions will be saved until expand,
736 where partitioned decls used as bases in memory expressions will be
737 rewritten. */
739 static void
740 update_alias_info_with_stack_vars (void)
742 part_hashmap *decls_to_partitions = NULL;
743 size_t i, j;
744 tree var = NULL_TREE;
746 for (i = 0; i < stack_vars_num; i++)
748 bitmap part = NULL;
749 tree name;
750 struct ptr_info_def *pi;
752 /* Not interested in partitions with single variable. */
753 if (stack_vars[i].representative != i
754 || stack_vars[i].next == EOC)
755 continue;
757 if (!decls_to_partitions)
759 decls_to_partitions = new part_hashmap;
760 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
763 /* Create an SSA_NAME that points to the partition for use
764 as base during alias-oracle queries on RTL for bases that
765 have been partitioned. */
766 if (var == NULL_TREE)
767 var = create_tmp_var (ptr_type_node);
768 name = make_ssa_name (var);
770 /* Create bitmaps representing partitions. They will be used for
771 points-to sets later, so use GGC alloc. */
772 part = BITMAP_GGC_ALLOC ();
773 for (j = i; j != EOC; j = stack_vars[j].next)
775 tree decl = stack_vars[j].decl;
776 unsigned int uid = DECL_PT_UID (decl);
777 bitmap_set_bit (part, uid);
778 decls_to_partitions->put (uid, part);
779 cfun->gimple_df->decls_to_pointers->put (decl, name);
780 if (TREE_ADDRESSABLE (decl))
781 TREE_ADDRESSABLE (name) = 1;
784 /* Make the SSA name point to all partition members. */
785 pi = get_ptr_info (name);
786 pt_solution_set (&pi->pt, part, false);
789 /* Make all points-to sets that contain one member of a partition
790 contain all members of the partition. */
791 if (decls_to_partitions)
793 unsigned i;
794 hash_set<bitmap> visited;
795 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
797 for (i = 1; i < num_ssa_names; i++)
799 tree name = ssa_name (i);
800 struct ptr_info_def *pi;
802 if (name
803 && POINTER_TYPE_P (TREE_TYPE (name))
804 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
805 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
806 &visited, temp);
809 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
810 decls_to_partitions, &visited, temp);
812 delete decls_to_partitions;
813 BITMAP_FREE (temp);
817 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
818 partitioning algorithm. Partitions A and B are known to be non-conflicting.
819 Merge them into a single partition A. */
821 static void
822 union_stack_vars (size_t a, size_t b)
824 struct stack_var *vb = &stack_vars[b];
825 bitmap_iterator bi;
826 unsigned u;
828 gcc_assert (stack_vars[b].next == EOC);
829 /* Add B to A's partition. */
830 stack_vars[b].next = stack_vars[a].next;
831 stack_vars[b].representative = a;
832 stack_vars[a].next = b;
834 /* Update the required alignment of partition A to account for B. */
835 if (stack_vars[a].alignb < stack_vars[b].alignb)
836 stack_vars[a].alignb = stack_vars[b].alignb;
838 /* Update the interference graph and merge the conflicts. */
839 if (vb->conflicts)
841 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
842 add_stack_var_conflict (a, stack_vars[u].representative);
843 BITMAP_FREE (vb->conflicts);
847 /* A subroutine of expand_used_vars. Binpack the variables into
848 partitions constrained by the interference graph. The overall
849 algorithm used is as follows:
851 Sort the objects by size in descending order.
852 For each object A {
853 S = size(A)
854 O = 0
855 loop {
856 Look for the largest non-conflicting object B with size <= S.
 857 UNION (A, B)
 858 }
 859 }
 860 */
862 static void
863 partition_stack_vars (void)
865 size_t si, sj, n = stack_vars_num;
867 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
868 for (si = 0; si < n; ++si)
869 stack_vars_sorted[si] = si;
871 if (n == 1)
872 return;
874 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
876 for (si = 0; si < n; ++si)
878 size_t i = stack_vars_sorted[si];
879 unsigned int ialign = stack_vars[i].alignb;
880 HOST_WIDE_INT isize = stack_vars[i].size;
882 /* Ignore objects that aren't partition representatives. If we
883 see a var that is not a partition representative, it must
884 have been merged earlier. */
885 if (stack_vars[i].representative != i)
886 continue;
888 for (sj = si + 1; sj < n; ++sj)
890 size_t j = stack_vars_sorted[sj];
891 unsigned int jalign = stack_vars[j].alignb;
892 HOST_WIDE_INT jsize = stack_vars[j].size;
894 /* Ignore objects that aren't partition representatives. */
895 if (stack_vars[j].representative != j)
896 continue;
898 /* Do not mix objects of "small" (supported) alignment
899 and "large" (unsupported) alignment. */
900 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
901 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
902 break;
904 /* For Address Sanitizer do not mix objects with different
905 sizes, as the shorter vars wouldn't be adequately protected.
906 Don't do that for "large" (unsupported) alignment objects,
907 those aren't protected anyway. */
908 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
909 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
910 break;
912 /* Ignore conflicting objects. */
913 if (stack_var_conflict_p (i, j))
914 continue;
916 /* UNION the objects, placing J at OFFSET. */
917 union_stack_vars (i, j);
921 update_alias_info_with_stack_vars ();
924 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
926 static void
927 dump_stack_var_partition (void)
929 size_t si, i, j, n = stack_vars_num;
931 for (si = 0; si < n; ++si)
933 i = stack_vars_sorted[si];
935 /* Skip variables that aren't partition representatives, for now. */
936 if (stack_vars[i].representative != i)
937 continue;
939 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
940 " align %u\n", (unsigned long) i, stack_vars[i].size,
941 stack_vars[i].alignb);
943 for (j = i; j != EOC; j = stack_vars[j].next)
945 fputc ('\t', dump_file);
946 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
948 fputc ('\n', dump_file);
952 /* Assign rtl to DECL at BASE + OFFSET. */
954 static void
955 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
956 HOST_WIDE_INT offset)
958 unsigned align;
959 rtx x;
961 /* If this fails, we've overflowed the stack frame. Error nicely? */
962 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
964 x = plus_constant (Pmode, base, offset);
965 x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
966 ? TYPE_MODE (TREE_TYPE (decl))
967 : DECL_MODE (SSAVAR (decl)), x);
969 if (TREE_CODE (decl) != SSA_NAME)
 971 /* Set the alignment we actually gave this decl if it isn't an SSA name.
 972 If it is, we generate stack slots only accidentally, so it isn't as
 973 important; we'll simply use the alignment that is already set. */
974 if (base == virtual_stack_vars_rtx)
975 offset -= frame_phase;
976 align = offset & -offset;
977 align *= BITS_PER_UNIT;
978 if (align == 0 || align > base_align)
979 align = base_align;
981 /* One would think that we could assert that we're not decreasing
982 alignment here, but (at least) the i386 port does exactly this
983 via the MINIMUM_ALIGNMENT hook. */
985 DECL_ALIGN (decl) = align;
986 DECL_USER_ALIGN (decl) = 0;
989 set_rtl (decl, x);
992 struct stack_vars_data
994 /* Vector of offset pairs, always end of some padding followed
995 by start of the padding that needs Address Sanitizer protection.
 996 The vector is stored in reverse order: highest offset pairs come first. */
997 vec<HOST_WIDE_INT> asan_vec;
999 /* Vector of partition representative decls in between the paddings. */
1000 vec<tree> asan_decl_vec;
1002 /* Base pseudo register for Address Sanitizer protected automatic vars. */
1003 rtx asan_base;
1005 /* Alignment needed for the Address Sanitizer protected automatic vars. */
1006 unsigned int asan_alignb;
1009 /* A subroutine of expand_used_vars. Give each partition representative
1010 a unique location within the stack frame. Update each partition member
1011 with that location. */
1013 static void
1014 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1016 size_t si, i, j, n = stack_vars_num;
1017 HOST_WIDE_INT large_size = 0, large_alloc = 0;
1018 rtx large_base = NULL;
1019 unsigned large_align = 0;
1020 tree decl;
1022 /* Determine if there are any variables requiring "large" alignment.
1023 Since these are dynamically allocated, we only process these if
 1024 no predicate is involved. */
1025 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
1026 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
1028 /* Find the total size of these variables. */
1029 for (si = 0; si < n; ++si)
1031 unsigned alignb;
1033 i = stack_vars_sorted[si];
1034 alignb = stack_vars[i].alignb;
1036 /* All "large" alignment decls come before all "small" alignment
1037 decls, but "large" alignment decls are not sorted based on
1038 their alignment. Increase large_align to track the largest
1039 required alignment. */
1040 if ((alignb * BITS_PER_UNIT) > large_align)
1041 large_align = alignb * BITS_PER_UNIT;
1043 /* Stop when we get to the first decl with "small" alignment. */
1044 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1045 break;
1047 /* Skip variables that aren't partition representatives. */
1048 if (stack_vars[i].representative != i)
1049 continue;
1051 /* Skip variables that have already had rtl assigned. See also
1052 add_stack_var where we perpetrate this pc_rtx hack. */
1053 decl = stack_vars[i].decl;
1054 if ((TREE_CODE (decl) == SSA_NAME
1055 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
1056 : DECL_RTL (decl)) != pc_rtx)
1057 continue;
1059 large_size += alignb - 1;
1060 large_size &= -(HOST_WIDE_INT)alignb;
1061 large_size += stack_vars[i].size;
1064 /* If there were any, allocate space. */
1065 if (large_size > 0)
1066 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
1067 large_align, true);
1070 for (si = 0; si < n; ++si)
1072 rtx base;
1073 unsigned base_align, alignb;
1074 HOST_WIDE_INT offset;
1076 i = stack_vars_sorted[si];
1078 /* Skip variables that aren't partition representatives, for now. */
1079 if (stack_vars[i].representative != i)
1080 continue;
1082 /* Skip variables that have already had rtl assigned. See also
1083 add_stack_var where we perpetrate this pc_rtx hack. */
1084 decl = stack_vars[i].decl;
1085 if ((TREE_CODE (decl) == SSA_NAME
1086 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
1087 : DECL_RTL (decl)) != pc_rtx)
1088 continue;
1090 /* Check the predicate to see whether this variable should be
1091 allocated in this pass. */
1092 if (pred && !pred (i))
1093 continue;
1095 alignb = stack_vars[i].alignb;
1096 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1098 base = virtual_stack_vars_rtx;
1099 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
1101 HOST_WIDE_INT prev_offset
1102 = align_base (frame_offset,
1103 MAX (alignb, ASAN_RED_ZONE_SIZE),
1104 FRAME_GROWS_DOWNWARD);
1105 tree repr_decl = NULL_TREE;
1106 offset
1107 = alloc_stack_frame_space (stack_vars[i].size
1108 + ASAN_RED_ZONE_SIZE,
1109 MAX (alignb, ASAN_RED_ZONE_SIZE));
1111 data->asan_vec.safe_push (prev_offset);
1112 data->asan_vec.safe_push (offset + stack_vars[i].size);
 1113 /* Find the best representative of the partition.
 1114 Prefer those with DECL_NAME, and even better those
 1115 satisfying the asan_protect_stack_decl predicate. */
1116 for (j = i; j != EOC; j = stack_vars[j].next)
1117 if (asan_protect_stack_decl (stack_vars[j].decl)
1118 && DECL_NAME (stack_vars[j].decl))
1120 repr_decl = stack_vars[j].decl;
1121 break;
1123 else if (repr_decl == NULL_TREE
1124 && DECL_P (stack_vars[j].decl)
1125 && DECL_NAME (stack_vars[j].decl))
1126 repr_decl = stack_vars[j].decl;
1127 if (repr_decl == NULL_TREE)
1128 repr_decl = stack_vars[i].decl;
1129 data->asan_decl_vec.safe_push (repr_decl);
1130 data->asan_alignb = MAX (data->asan_alignb, alignb);
1131 if (data->asan_base == NULL)
1132 data->asan_base = gen_reg_rtx (Pmode);
1133 base = data->asan_base;
1135 if (!STRICT_ALIGNMENT)
1136 base_align = crtl->max_used_stack_slot_alignment;
1137 else
1138 base_align = MAX (crtl->max_used_stack_slot_alignment,
1139 GET_MODE_ALIGNMENT (SImode)
1140 << ASAN_SHADOW_SHIFT);
1142 else
1144 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1145 base_align = crtl->max_used_stack_slot_alignment;
1148 else
1150 /* Large alignment is only processed in the last pass. */
1151 if (pred)
1152 continue;
1153 gcc_assert (large_base != NULL);
1155 large_alloc += alignb - 1;
1156 large_alloc &= -(HOST_WIDE_INT)alignb;
1157 offset = large_alloc;
1158 large_alloc += stack_vars[i].size;
1160 base = large_base;
1161 base_align = large_align;
1164 /* Create rtl for each variable based on their location within the
1165 partition. */
1166 for (j = i; j != EOC; j = stack_vars[j].next)
1168 expand_one_stack_var_at (stack_vars[j].decl,
1169 base, base_align,
1170 offset);
1174 gcc_assert (large_alloc == large_size);
1177 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1178 static HOST_WIDE_INT
1179 account_stack_vars (void)
1181 size_t si, j, i, n = stack_vars_num;
1182 HOST_WIDE_INT size = 0;
1184 for (si = 0; si < n; ++si)
1186 i = stack_vars_sorted[si];
1188 /* Skip variables that aren't partition representatives, for now. */
1189 if (stack_vars[i].representative != i)
1190 continue;
1192 size += stack_vars[i].size;
1193 for (j = i; j != EOC; j = stack_vars[j].next)
1194 set_rtl (stack_vars[j].decl, NULL);
1196 return size;
1199 /* A subroutine of expand_one_var. Called to immediately assign rtl
1200 to a variable to be allocated in the stack frame. */
1202 static void
1203 expand_one_stack_var_1 (tree var)
1205 HOST_WIDE_INT size, offset;
1206 unsigned byte_align;
1208 if (TREE_CODE (var) == SSA_NAME)
1210 tree type = TREE_TYPE (var);
1211 size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1212 byte_align = TYPE_ALIGN_UNIT (type);
1214 else
1216 size = tree_to_uhwi (DECL_SIZE_UNIT (var));
1217 byte_align = align_local_variable (var);
1220 /* We handle highly aligned variables in expand_stack_vars. */
1221 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1223 offset = alloc_stack_frame_space (size, byte_align);
1225 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1226 crtl->max_used_stack_slot_alignment, offset);
1229 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1230 already assigned some MEM. */
1232 static void
1233 expand_one_stack_var (tree var)
1235 if (TREE_CODE (var) == SSA_NAME)
1237 int part = var_to_partition (SA.map, var);
1238 if (part != NO_PARTITION)
1240 rtx x = SA.partition_to_pseudo[part];
1241 gcc_assert (x);
1242 gcc_assert (MEM_P (x));
1243 return;
1247 return expand_one_stack_var_1 (var);
1250 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1251 that will reside in a hard register. */
1253 static void
1254 expand_one_hard_reg_var (tree var)
1256 rest_of_decl_compilation (var, 0, 0);
1259 /* Record the alignment requirements of some variable assigned to a
1260 pseudo. */
1262 static void
1263 record_alignment_for_reg_var (unsigned int align)
1265 if (SUPPORTS_STACK_ALIGNMENT
1266 && crtl->stack_alignment_estimated < align)
1268 /* stack_alignment_estimated shouldn't change after stack
 1269 realign decision has been made. */
1270 gcc_assert (!crtl->stack_realign_processed);
1271 crtl->stack_alignment_estimated = align;
1274 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1275 So here we only make sure stack_alignment_needed >= align. */
1276 if (crtl->stack_alignment_needed < align)
1277 crtl->stack_alignment_needed = align;
1278 if (crtl->max_used_stack_slot_alignment < align)
1279 crtl->max_used_stack_slot_alignment = align;
1282 /* Create RTL for an SSA partition. */
1284 static void
1285 expand_one_ssa_partition (tree var)
1287 int part = var_to_partition (SA.map, var);
1288 gcc_assert (part != NO_PARTITION);
1290 if (SA.partition_to_pseudo[part])
1291 return;
1293 if (!use_register_for_decl (var))
1295 expand_one_stack_var_1 (var);
1296 return;
1299 unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1300 TYPE_MODE (TREE_TYPE (var)),
1301 TYPE_ALIGN (TREE_TYPE (var)));
 1303 /* If the variable alignment is very large we'll dynamically allocate
 1304 it, which means that the in-frame portion is just a pointer. */
1305 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1306 align = POINTER_SIZE;
1308 record_alignment_for_reg_var (align);
1310 machine_mode reg_mode = promote_ssa_mode (var, NULL);
1312 rtx x = gen_reg_rtx (reg_mode);
1314 set_rtl (var, x);
1317 /* Record the association between the RTL generated for a partition
1318 and the underlying variable of the SSA_NAME. */
1320 static void
1321 adjust_one_expanded_partition_var (tree var)
1323 if (!var)
1324 return;
1326 tree decl = SSA_NAME_VAR (var);
1328 int part = var_to_partition (SA.map, var);
1329 if (part == NO_PARTITION)
1330 return;
1332 rtx x = SA.partition_to_pseudo[part];
1334 set_rtl (var, x);
1336 if (!REG_P (x))
1337 return;
1339 /* Note if the object is a user variable. */
1340 if (decl && !DECL_ARTIFICIAL (decl))
1341 mark_user_reg (x);
1343 if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
1344 mark_reg_pointer (x, get_pointer_alignment (var));
1347 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1348 that will reside in a pseudo register. */
1350 static void
1351 expand_one_register_var (tree var)
1353 if (TREE_CODE (var) == SSA_NAME)
1355 int part = var_to_partition (SA.map, var);
1356 if (part != NO_PARTITION)
1358 rtx x = SA.partition_to_pseudo[part];
1359 gcc_assert (x);
1360 gcc_assert (REG_P (x));
1361 return;
1363 gcc_unreachable ();
1366 tree decl = var;
1367 tree type = TREE_TYPE (decl);
1368 machine_mode reg_mode = promote_decl_mode (decl, NULL);
1369 rtx x = gen_reg_rtx (reg_mode);
1371 set_rtl (var, x);
1373 /* Note if the object is a user variable. */
1374 if (!DECL_ARTIFICIAL (decl))
1375 mark_user_reg (x);
1377 if (POINTER_TYPE_P (type))
1378 mark_reg_pointer (x, get_pointer_alignment (var));
1381 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1382 has some associated error, e.g. its type is error-mark. We just need
1383 to pick something that won't crash the rest of the compiler. */
1385 static void
1386 expand_one_error_var (tree var)
1388 machine_mode mode = DECL_MODE (var);
1389 rtx x;
1391 if (mode == BLKmode)
1392 x = gen_rtx_MEM (BLKmode, const0_rtx);
1393 else if (mode == VOIDmode)
1394 x = const0_rtx;
1395 else
1396 x = gen_reg_rtx (mode);
1398 SET_DECL_RTL (var, x);
1401 /* A subroutine of expand_one_var. VAR is a variable that will be
1402 allocated to the local stack frame. Return true if we wish to
1403 add VAR to STACK_VARS so that it will be coalesced with other
1404 variables. Return false to allocate VAR immediately.
1406 This function is used to reduce the number of variables considered
1407 for coalescing, which reduces the size of the quadratic problem. */
1409 static bool
1410 defer_stack_allocation (tree var, bool toplevel)
1412 /* Whether the variable is small enough for immediate allocation not to be
1413 a problem with regard to the frame size. */
1414 bool smallish
1415 = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
1416 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1418 /* If stack protection is enabled, *all* stack variables must be deferred,
1419 so that we can re-order the strings to the top of the frame.
1420 Similarly for Address Sanitizer. */
1421 if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
1422 return true;
1424 /* We handle "large" alignment via dynamic allocation. We want to handle
1425 this extra complication in only one place, so defer them. */
1426 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1427 return true;
1429 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1430 might be detached from their block and appear at toplevel when we reach
1431 here. We want to coalesce them with variables from other blocks when
1432 the immediate contribution to the frame size would be noticeable. */
1433 if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
1434 return true;
1436 /* Variables declared in the outermost scope automatically conflict
1437 with every other variable. The only reason to want to defer them
1438 at all is that, after sorting, we can more efficiently pack
1439 small variables in the stack frame. Continue to defer at -O2. */
1440 if (toplevel && optimize < 2)
1441 return false;
1443 /* Without optimization, *most* variables are allocated from the
1444 stack, which makes the quadratic problem large exactly when we
1445 want compilation to proceed as quickly as possible. On the
1446 other hand, we don't want the function's stack frame size to
1447 get completely out of hand. So we avoid adding scalars and
1448 "small" aggregates to the list at all. */
1449 if (optimize == 0 && smallish)
1450 return false;
1452 return true;
1455 /* A subroutine of expand_used_vars. Expand one variable according to
1456 its flavor. Variables to be placed on the stack are not actually
1457 expanded yet, merely recorded.
1458 When REALLY_EXPAND is false, only add stack values to be allocated.
 1459 Return stack usage this variable is supposed to take.
 1460 */
1462 static HOST_WIDE_INT
1463 expand_one_var (tree var, bool toplevel, bool really_expand)
1465 unsigned int align = BITS_PER_UNIT;
1466 tree origvar = var;
1468 var = SSAVAR (var);
1470 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1472 /* Because we don't know if VAR will be in register or on stack,
1473 we conservatively assume it will be on stack even if VAR is
1474 eventually put into register after RA pass. For non-automatic
1475 variables, which won't be on stack, we collect alignment of
1476 type and ignore user specified alignment. Similarly for
1477 SSA_NAMEs for which use_register_for_decl returns true. */
1478 if (TREE_STATIC (var)
1479 || DECL_EXTERNAL (var)
1480 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1481 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1482 TYPE_MODE (TREE_TYPE (var)),
1483 TYPE_ALIGN (TREE_TYPE (var)));
1484 else if (DECL_HAS_VALUE_EXPR_P (var)
1485 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1486 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1487 or variables which were assigned a stack slot already by
1488 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1489 changed from the offset chosen to it. */
1490 align = crtl->stack_alignment_estimated;
1491 else
1492 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
 1494 /* If the variable alignment is very large we'll dynamically allocate
 1495 it, which means that the in-frame portion is just a pointer. */
1496 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1497 align = POINTER_SIZE;
1500 record_alignment_for_reg_var (align);
1502 if (TREE_CODE (origvar) == SSA_NAME)
1504 gcc_assert (TREE_CODE (var) != VAR_DECL
1505 || (!DECL_EXTERNAL (var)
1506 && !DECL_HAS_VALUE_EXPR_P (var)
1507 && !TREE_STATIC (var)
1508 && TREE_TYPE (var) != error_mark_node
1509 && !DECL_HARD_REGISTER (var)
1510 && really_expand));
 1512 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
 1513   ;
 1514 else if (DECL_EXTERNAL (var))
 1515   ;
 1516 else if (DECL_HAS_VALUE_EXPR_P (var))
 1517   ;
 1518 else if (TREE_STATIC (var))
 1519   ;
 1520 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
 1521   ;
 1522 else if (TREE_TYPE (var) == error_mark_node)
1524 if (really_expand)
1525 expand_one_error_var (var);
1527 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1529 if (really_expand)
1531 expand_one_hard_reg_var (var);
1532 if (!DECL_HARD_REGISTER (var))
1533 /* Invalid register specification. */
1534 expand_one_error_var (var);
1537 else if (use_register_for_decl (var))
1539 if (really_expand)
1540 expand_one_register_var (origvar);
1542 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1544 /* Reject variables which cover more than half of the address-space. */
1545 if (really_expand)
1547 error ("size of variable %q+D is too large", var);
1548 expand_one_error_var (var);
1551 else if (defer_stack_allocation (var, toplevel))
1552 add_stack_var (origvar);
1553 else
1555 if (really_expand)
1556 expand_one_stack_var (origvar);
1557 return tree_to_uhwi (DECL_SIZE_UNIT (var));
1559 return 0;
1562 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1563 expanding variables. Those variables that can be put into registers
1564 are allocated pseudos; those that can't are put on the stack.
1566 TOPLEVEL is true if this is the outermost BLOCK. */
1568 static void
1569 expand_used_vars_for_block (tree block, bool toplevel)
1571 tree t;
1573 /* Expand all variables at this level. */
1574 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1575 if (TREE_USED (t)
1576 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1577 || !DECL_NONSHAREABLE (t)))
1578 expand_one_var (t, toplevel, true);
1580 /* Expand all variables at containing levels. */
1581 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1582 expand_used_vars_for_block (t, false);
1585 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1586 and clear TREE_USED on all local variables. */
1588 static void
1589 clear_tree_used (tree block)
1591 tree t;
1593 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1594 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1595 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1596 || !DECL_NONSHAREABLE (t))
1597 TREE_USED (t) = 0;
1599 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1600 clear_tree_used (t);
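/* The possible values of flag_stack_protect, set by the -fstack-protector
   family of options. */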
1603 enum {
1604 SPCT_FLAG_DEFAULT = 1,
1605 SPCT_FLAG_ALL = 2,
1606 SPCT_FLAG_STRONG = 3,
1607 SPCT_FLAG_EXPLICIT = 4
1610 /* Examine TYPE and determine a bit mask of the following features. */
1612 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1613 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1614 #define SPCT_HAS_ARRAY 4
1615 #define SPCT_HAS_AGGREGATE 8
1617 static unsigned int
1618 stack_protect_classify_type (tree type)
1620 unsigned int ret = 0;
1621 tree t;
1623 switch (TREE_CODE (type))
1625 case ARRAY_TYPE:
1626 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1627 if (t == char_type_node
1628 || t == signed_char_type_node
1629 || t == unsigned_char_type_node)
1631 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1632 unsigned HOST_WIDE_INT len;
1634 if (!TYPE_SIZE_UNIT (type)
1635 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1636 len = max;
1637 else
1638 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1640 if (len < max)
1641 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1642 else
1643 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1645 else
1646 ret = SPCT_HAS_ARRAY;
1647 break;
1649 case UNION_TYPE:
1650 case QUAL_UNION_TYPE:
1651 case RECORD_TYPE:
1652 ret = SPCT_HAS_AGGREGATE;
1653 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1654 if (TREE_CODE (t) == FIELD_DECL)
1655 ret |= stack_protect_classify_type (TREE_TYPE (t));
1656 break;
1658 default:
1659 break;
1662 return ret;
1665 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1666 part of the local stack frame. Remember if we ever return nonzero for
1667 any variable in this function. The return value is the phase number in
1668 which the variable should be allocated. */
1670 static int
1671 stack_protect_decl_phase (tree decl)
1673 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1674 int ret = 0;
1676 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1677 has_short_buffer = true;
1679 if (flag_stack_protect == SPCT_FLAG_ALL
1680 || flag_stack_protect == SPCT_FLAG_STRONG
1681 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1682 && lookup_attribute ("stack_protect",
1683 DECL_ATTRIBUTES (current_function_decl))))
1685 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1686 && !(bits & SPCT_HAS_AGGREGATE))
1687 ret = 1;
1688 else if (bits & SPCT_HAS_ARRAY)
1689 ret = 2;
1691 else
1692 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1694 if (ret)
1695 has_protected_decls = true;
1697 return ret;
1700 /* Two helper routines that check for phase 1 and phase 2. These are used
1701 as callbacks for expand_stack_vars. */
1703 static bool
1704 stack_protect_decl_phase_1 (size_t i)
1706 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1709 static bool
1710 stack_protect_decl_phase_2 (size_t i)
1712 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
 1715 /* A helper function that checks for the asan phase (with stack protector
1716 it is phase 3). This is used as callback for expand_stack_vars.
1717 Returns true if any of the vars in the partition need to be protected. */
1719 static bool
1720 asan_decl_phase_3 (size_t i)
1722 while (i != EOC)
1724 if (asan_protect_stack_decl (stack_vars[i].decl))
1725 return true;
1726 i = stack_vars[i].next;
1728 return false;
1731 /* Ensure that variables in different stack protection phases conflict
1732 so that they are not merged and share the same stack slot. */
1734 static void
1735 add_stack_protection_conflicts (void)
1737 size_t i, j, n = stack_vars_num;
1738 unsigned char *phase;
1740 phase = XNEWVEC (unsigned char, n);
1741 for (i = 0; i < n; ++i)
1742 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1744 for (i = 0; i < n; ++i)
1746 unsigned char ph_i = phase[i];
1747 for (j = i + 1; j < n; ++j)
1748 if (ph_i != phase[j])
1749 add_stack_var_conflict (i, j);
1752 XDELETEVEC (phase);
1755 /* Create a decl for the guard at the top of the stack frame. */
1757 static void
1758 create_stack_guard (void)
1760 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1761 VAR_DECL, NULL, ptr_type_node);
1762 TREE_THIS_VOLATILE (guard) = 1;
1763 TREE_USED (guard) = 1;
1764 expand_one_stack_var (guard);
1765 crtl->stack_protect_guard = guard;
1768 /* Prepare for expanding variables. */
1769 static void
1770 init_vars_expansion (void)
1772 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1773 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1775 /* A map from decl to stack partition. */
1776 decl_to_stack_part = new hash_map<tree, size_t>;
1778 /* Initialize local stack smashing state. */
1779 has_protected_decls = false;
1780 has_short_buffer = false;
1783 /* Free up stack variable graph data. */
1784 static void
1785 fini_vars_expansion (void)
1787 bitmap_obstack_release (&stack_var_bitmap_obstack);
1788 if (stack_vars)
1789 XDELETEVEC (stack_vars);
1790 if (stack_vars_sorted)
1791 XDELETEVEC (stack_vars_sorted);
1792 stack_vars = NULL;
1793 stack_vars_sorted = NULL;
1794 stack_vars_alloc = stack_vars_num = 0;
1795 delete decl_to_stack_part;
1796 decl_to_stack_part = NULL;
1799 /* Make a fair guess for the size of the stack frame of the function
1800 in NODE. This doesn't have to be exact, the result is only used in
1801 the inline heuristics. So we don't want to run the full stack var
1802 packing algorithm (which is quadratic in the number of stack vars).
1803 Instead, we calculate the total size of all stack vars. This turns
1804 out to be a pretty fair estimate -- packing of stack vars doesn't
1805 happen very often. */
1807 HOST_WIDE_INT
1808 estimated_stack_frame_size (struct cgraph_node *node)
1810 HOST_WIDE_INT size = 0;
1811 size_t i;
1812 tree var;
1813 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1815 push_cfun (fn);
1817 init_vars_expansion ();
1819 FOR_EACH_LOCAL_DECL (fn, i, var)
1820 if (auto_var_in_fn_p (var, fn->decl))
1821 size += expand_one_var (var, true, false);
1823 if (stack_vars_num > 0)
1825 /* Fake sorting the stack vars for account_stack_vars (). */
1826 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1827 for (i = 0; i < stack_vars_num; ++i)
1828 stack_vars_sorted[i] = i;
1829 size += account_stack_vars ();
1832 fini_vars_expansion ();
1833 pop_cfun ();
1834 return size;
1837 /* Helper routine to check if a record or union contains an array field. */
1839 static int
1840 record_or_union_type_has_array_p (const_tree tree_type)
1842 tree fields = TYPE_FIELDS (tree_type);
1843 tree f;
1845 for (f = fields; f; f = DECL_CHAIN (f))
1846 if (TREE_CODE (f) == FIELD_DECL)
1848 tree field_type = TREE_TYPE (f);
1849 if (RECORD_OR_UNION_TYPE_P (field_type)
1850 && record_or_union_type_has_array_p (field_type))
1851 return 1;
1852 if (TREE_CODE (field_type) == ARRAY_TYPE)
1853 return 1;
1855 return 0;
1858 /* Check if the current function has local referenced variables that
1859 have their addresses taken, contain an array, or are arrays. */
1861 static bool
1862 stack_protect_decl_p ()
1864 unsigned i;
1865 tree var;
1867 FOR_EACH_LOCAL_DECL (cfun, i, var)
1868 if (!is_global_var (var))
1870 tree var_type = TREE_TYPE (var);
1871 if (TREE_CODE (var) == VAR_DECL
1872 && (TREE_CODE (var_type) == ARRAY_TYPE
1873 || TREE_ADDRESSABLE (var)
1874 || (RECORD_OR_UNION_TYPE_P (var_type)
1875 && record_or_union_type_has_array_p (var_type))))
1876 return true;
1878 return false;
1881 /* Check if the current function has calls that use a return slot. */
1883 static bool
1884 stack_protect_return_slot_p ()
1886 basic_block bb;
1888 FOR_ALL_BB_FN (bb, cfun)
1889 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
1890 !gsi_end_p (gsi); gsi_next (&gsi))
1892 gimple stmt = gsi_stmt (gsi);
1893 /* This assumes that calls to internal-only functions never
1894 use a return slot. */
1895 if (is_gimple_call (stmt)
1896 && !gimple_call_internal_p (stmt)
1897 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
1898 gimple_call_fndecl (stmt)))
1899 return true;
1901 return false;
1904 /* Expand all variables used in the function. */
1906 static rtx_insn *
1907 expand_used_vars (void)
1909 tree var, outer_block = DECL_INITIAL (current_function_decl);
1910 vec<tree> maybe_local_decls = vNULL;
1911 rtx_insn *var_end_seq = NULL;
1912 unsigned i;
1913 unsigned len;
1914 bool gen_stack_protect_signal = false;
1916 /* Compute the phase of the stack frame for this function. */
1918 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1919 int off = STARTING_FRAME_OFFSET % align;
1920 frame_phase = off ? align - off : 0;
1923 /* Set TREE_USED on all variables in the local_decls. */
1924 FOR_EACH_LOCAL_DECL (cfun, i, var)
1925 TREE_USED (var) = 1;
1926 /* Clear TREE_USED on all variables associated with a block scope. */
1927 clear_tree_used (DECL_INITIAL (current_function_decl));
1929 init_vars_expansion ();
1931 if (targetm.use_pseudo_pic_reg ())
1932 pic_offset_table_rtx = gen_reg_rtx (Pmode);
1934 for (i = 0; i < SA.map->num_partitions; i++)
1936 tree var = partition_to_var (SA.map, i);
1938 gcc_assert (!virtual_operand_p (var));
1940 expand_one_ssa_partition (var);
1943 for (i = 1; i < num_ssa_names; i++)
1944 adjust_one_expanded_partition_var (ssa_name (i));
1946 if (flag_stack_protect == SPCT_FLAG_STRONG)
1947 gen_stack_protect_signal
1948 = stack_protect_decl_p () || stack_protect_return_slot_p ();
1950 /* At this point all variables on the local_decls with TREE_USED
1951 set are not associated with any block scope. Lay them out. */
1953 len = vec_safe_length (cfun->local_decls);
1954 FOR_EACH_LOCAL_DECL (cfun, i, var)
1956 bool expand_now = false;
1958 /* Expanded above already. */
1959 if (is_gimple_reg (var))
1961 TREE_USED (var) = 0;
1962 goto next;
1964 /* We didn't set a block for static or extern because it's hard
1965 to tell the difference between a global variable (re)declared
1966 in a local scope, and one that's really declared there to
1967 begin with. And it doesn't really matter much, since we're
1968 not giving them stack space. Expand them now. */
1969 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1970 expand_now = true;
1972 /* Expand variables not associated with any block now. Those created by
1973 the optimizers could be live anywhere in the function. Those that
1974 could possibly have been scoped originally and detached from their
1975 block will have their allocation deferred so we coalesce them with
1976 others when optimization is enabled. */
1977 else if (TREE_USED (var))
1978 expand_now = true;
1980 /* Finally, mark all variables on the list as used. We'll use
1981 this in a moment when we expand those associated with scopes. */
1982 TREE_USED (var) = 1;
1984 if (expand_now)
1985 expand_one_var (var, true, true);
1987 next:
1988 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1990 rtx rtl = DECL_RTL_IF_SET (var);
1992 /* Keep artificial non-ignored vars in cfun->local_decls
1993 chain until instantiate_decls. */
1994 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1995 add_local_decl (cfun, var);
1996 else if (rtl == NULL_RTX)
1997 /* If rtl isn't set yet, which can happen e.g. with
1998 -fstack-protector, retry before returning from this
1999 function. */
2000 maybe_local_decls.safe_push (var);
2004 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2006 +-----------------+-----------------+
2007 | ...processed... | ...duplicates...|
2008 +-----------------+-----------------+
2010 +-- LEN points here.
2012 We just want the duplicates, as those are the artificial
2013 non-ignored vars that we want to keep until instantiate_decls.
2014 Move them down and truncate the array. */
2015 if (!vec_safe_is_empty (cfun->local_decls))
2016 cfun->local_decls->block_remove (0, len);
2018 /* At this point, all variables within the block tree with TREE_USED
2019 set are actually used by the optimized function. Lay them out. */
2020 expand_used_vars_for_block (outer_block, true);
2022 if (stack_vars_num > 0)
2024 add_scope_conflicts ();
2026 /* If stack protection is enabled, we don't share space between
2027 vulnerable data and non-vulnerable data. */
2028 if (flag_stack_protect != 0
2029 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2030 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2031 && lookup_attribute ("stack_protect",
2032 DECL_ATTRIBUTES (current_function_decl)))))
2033 add_stack_protection_conflicts ();
2035 /* Now that we have collected all stack variables, and have computed a
2036 minimal interference graph, attempt to save some stack space. */
2037 partition_stack_vars ();
2038 if (dump_file)
2039 dump_stack_var_partition ();
2042 switch (flag_stack_protect)
2044 case SPCT_FLAG_ALL:
2045 create_stack_guard ();
2046 break;
2048 case SPCT_FLAG_STRONG:
2049 if (gen_stack_protect_signal
2050 || cfun->calls_alloca || has_protected_decls
2051 || lookup_attribute ("stack_protect",
2052 DECL_ATTRIBUTES (current_function_decl)))
2053 create_stack_guard ();
2054 break;
2056 case SPCT_FLAG_DEFAULT:
2057 if (cfun->calls_alloca || has_protected_decls
2058 || lookup_attribute ("stack_protect",
2059 DECL_ATTRIBUTES (current_function_decl)))
2060 create_stack_guard ();
2061 break;
2063 case SPCT_FLAG_EXPLICIT:
2064 if (lookup_attribute ("stack_protect",
2065 DECL_ATTRIBUTES (current_function_decl)))
2066 create_stack_guard ();
2067 break;
2068 default:
2072 /* Assign rtl to each variable based on these partitions. */
2073 if (stack_vars_num > 0)
2075 struct stack_vars_data data;
2077 data.asan_vec = vNULL;
2078 data.asan_decl_vec = vNULL;
2079 data.asan_base = NULL_RTX;
2080 data.asan_alignb = 0;
2082 /* Reorder decls to be protected by iterating over the variables
2083 array multiple times, and allocating out of each phase in turn. */
2084 /* ??? We could probably integrate this into the qsort we did
2085 earlier, such that we naturally see these variables first,
2086 and thus naturally allocate things in the right order. */
2087 if (has_protected_decls)
2089 /* Phase 1 contains only character arrays. */
2090 expand_stack_vars (stack_protect_decl_phase_1, &data);
2092 /* Phase 2 contains other kinds of arrays. */
2093 if (flag_stack_protect == SPCT_FLAG_ALL
2094 || flag_stack_protect == SPCT_FLAG_STRONG
2095 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2096 && lookup_attribute ("stack_protect",
2097 DECL_ATTRIBUTES (current_function_decl))))
2098 expand_stack_vars (stack_protect_decl_phase_2, &data);
2101 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
2102 /* Phase 3, any partitions that need asan protection
2103 in addition to phase 1 and 2. */
2104 expand_stack_vars (asan_decl_phase_3, &data);
2106 if (!data.asan_vec.is_empty ())
2108 HOST_WIDE_INT prev_offset = frame_offset;
2109 HOST_WIDE_INT offset, sz, redzonesz;
2110 redzonesz = ASAN_RED_ZONE_SIZE;
2111 sz = data.asan_vec[0] - prev_offset;
2112 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2113 && data.asan_alignb <= 4096
2114 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2115 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2116 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
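/* A worked example, assuming ASAN_RED_ZONE_SIZE is 32 bytes: with sz == 40
   and data.asan_alignb == 64, redzonesz becomes ((40 + 32 + 63) & ~63) - 40
   == 88, so the 40 bytes of variables plus their red zone end exactly on a
   64-byte boundary.  */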
2117 offset
2118 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
2119 data.asan_vec.safe_push (prev_offset);
2120 data.asan_vec.safe_push (offset);
2121 /* Leave space for alignment if STRICT_ALIGNMENT. */
2122 if (STRICT_ALIGNMENT)
2123 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2124 << ASAN_SHADOW_SHIFT)
2125 / BITS_PER_UNIT, 1);
2127 var_end_seq
2128 = asan_emit_stack_protection (virtual_stack_vars_rtx,
2129 data.asan_base,
2130 data.asan_alignb,
2131 data.asan_vec.address (),
2132 data.asan_decl_vec.address (),
2133 data.asan_vec.length ());
2136 expand_stack_vars (NULL, &data);
2138 data.asan_vec.release ();
2139 data.asan_decl_vec.release ();
2142 fini_vars_expansion ();
2144 /* If there were any artificial non-ignored vars without rtl
2145 found earlier, see if deferred stack allocation hasn't assigned
2146 rtl to them. */
2147 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2149 rtx rtl = DECL_RTL_IF_SET (var);
2151 /* Keep artificial non-ignored vars in cfun->local_decls
2152 chain until instantiate_decls. */
2153 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2154 add_local_decl (cfun, var);
2156 maybe_local_decls.release ();
2158 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2159 if (STACK_ALIGNMENT_NEEDED)
2161 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2162 if (!FRAME_GROWS_DOWNWARD)
2163 frame_offset += align - 1;
2164 frame_offset &= -align;
2167 return var_end_seq;
2171 /* If we need to produce a detailed dump, print the tree representation
2172 for STMT to the dump file. SINCE is the last RTX after which the RTL
2173 generated for STMT should have been appended. */
2175 static void
2176 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
2178 if (dump_file && (dump_flags & TDF_DETAILS))
2180 fprintf (dump_file, "\n;; ");
2181 print_gimple_stmt (dump_file, stmt, 0,
2182 TDF_SLIM | (dump_flags & TDF_LINENO));
2183 fprintf (dump_file, "\n");
2185 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2189 /* Maps the blocks that do not contain tree labels to rtx labels. */
2191 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2193 /* Returns the label_rtx expression for a label starting basic block BB. */
2195 static rtx_code_label *
2196 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2198 gimple_stmt_iterator gsi;
2199 tree lab;
2201 if (bb->flags & BB_RTL)
2202 return block_label (bb);
2204 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2205 if (elt)
2206 return *elt;
2208 /* Find the tree label if it is present. */
2210 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2212 glabel *lab_stmt;
2214 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2215 if (!lab_stmt)
2216 break;
2218 lab = gimple_label_label (lab_stmt);
2219 if (DECL_NONLOCAL (lab))
2220 break;
2222 return jump_target_rtx (lab);
2225 rtx_code_label *l = gen_label_rtx ();
2226 lab_rtx_for_bb->put (bb, l);
2227 return l;
2231 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2232 of a basic block where we just expanded the conditional at the end,
2233 possibly clean up the CFG and instruction sequence. LAST is the
2234 last instruction before the just emitted jump sequence. */
2236 static void
2237 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2239 /* Special case: when jumpif decides that the condition is
2240 trivial it emits an unconditional jump (and the necessary
2241 barrier). But we still have two edges, the fallthru one is
2242 wrong. purge_dead_edges would clean this up later. Unfortunately
2243 we have to insert insns (and split edges) before
2244 find_many_sub_basic_blocks and hence before purge_dead_edges.
2245 But splitting edges might create new blocks which depend on the
2246 fact that if there are two edges there's no barrier. So the
2247 barrier would get lost and verify_flow_info would ICE. Instead
2248 of auditing all edge splitters to care for the barrier (which
2249 normally isn't there in a cleaned CFG), fix it here. */
2250 if (BARRIER_P (get_last_insn ()))
2252 rtx_insn *insn;
2253 remove_edge (e);
2254 /* Now, we have a single successor block, if we have insns to
2255 insert on the remaining edge we potentially will insert
2256 it at the end of this block (if the dest block isn't feasible)
2257 in order to avoid splitting the edge. This insertion will take
2258 place in front of the last jump. But we might have emitted
2259 multiple jumps (conditional and one unconditional) to the
2260 same destination. Inserting in front of the last one then
2261 is a problem. See PR 40021. We fix this by deleting all
2262 jumps except the last unconditional one. */
2263 insn = PREV_INSN (get_last_insn ());
2264 /* Make sure we have an unconditional jump. Otherwise we're
2265 confused. */
2266 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2267 for (insn = PREV_INSN (insn); insn != last;)
2269 insn = PREV_INSN (insn);
2270 if (JUMP_P (NEXT_INSN (insn)))
2272 if (!any_condjump_p (NEXT_INSN (insn)))
2274 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2275 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2277 delete_insn (NEXT_INSN (insn));
2283 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2284 Returns a new basic block if we've terminated the current basic
2285 block and created a new one. */
2287 static basic_block
2288 expand_gimple_cond (basic_block bb, gcond *stmt)
2290 basic_block new_bb, dest;
2291 edge new_edge;
2292 edge true_edge;
2293 edge false_edge;
2294 rtx_insn *last2, *last;
2295 enum tree_code code;
2296 tree op0, op1;
2298 code = gimple_cond_code (stmt);
2299 op0 = gimple_cond_lhs (stmt);
2300 op1 = gimple_cond_rhs (stmt);
2301 /* We're sometimes presented with such code:
2302 D.123_1 = x < y;
2303 if (D.123_1 != 0)
2305 This would expand to two comparisons which then later might
2306 be cleaned up by combine. But some pattern matchers like if-conversion
2307 work better when there's only one compare, so make up for this
2308 here as special exception if TER would have made the same change. */
2309 if (SA.values
2310 && TREE_CODE (op0) == SSA_NAME
2311 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2312 && TREE_CODE (op1) == INTEGER_CST
2313 && ((gimple_cond_code (stmt) == NE_EXPR
2314 && integer_zerop (op1))
2315 || (gimple_cond_code (stmt) == EQ_EXPR
2316 && integer_onep (op1)))
2317 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2319 gimple second = SSA_NAME_DEF_STMT (op0);
2320 if (gimple_code (second) == GIMPLE_ASSIGN)
2322 enum tree_code code2 = gimple_assign_rhs_code (second);
2323 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2325 code = code2;
2326 op0 = gimple_assign_rhs1 (second);
2327 op1 = gimple_assign_rhs2 (second);
2329 /* If jumps are cheap and the target does not support conditional
2330 compare, turn some more codes into jumpy sequences. */
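/* For example, with single-bit operands a sequence like
     tmp_2 = a_1 & b_3;
     if (tmp_2 != 0) ...
   can be expanded as TRUTH_ANDIF_EXPR, i.e. two cheap conditional jumps
   testing a_1 and then b_3, instead of materializing the BIT_AND_EXPR
   result first.  */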
2331 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2332 && targetm.gen_ccmp_first == NULL)
2334 if ((code2 == BIT_AND_EXPR
2335 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2336 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2337 || code2 == TRUTH_AND_EXPR)
2339 code = TRUTH_ANDIF_EXPR;
2340 op0 = gimple_assign_rhs1 (second);
2341 op1 = gimple_assign_rhs2 (second);
2343 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2345 code = TRUTH_ORIF_EXPR;
2346 op0 = gimple_assign_rhs1 (second);
2347 op1 = gimple_assign_rhs2 (second);
2353 last2 = last = get_last_insn ();
2355 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2356 set_curr_insn_location (gimple_location (stmt));
2358 /* These flags have no purpose in RTL land. */
2359 true_edge->flags &= ~EDGE_TRUE_VALUE;
2360 false_edge->flags &= ~EDGE_FALSE_VALUE;
2362 /* We can either have a pure conditional jump with one fallthru edge or
2363 a two-way jump that needs to be decomposed into two basic blocks. */
2364 if (false_edge->dest == bb->next_bb)
2366 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2367 true_edge->probability);
2368 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2369 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2370 set_curr_insn_location (true_edge->goto_locus);
2371 false_edge->flags |= EDGE_FALLTHRU;
2372 maybe_cleanup_end_of_block (false_edge, last);
2373 return NULL;
2375 if (true_edge->dest == bb->next_bb)
2377 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2378 false_edge->probability);
2379 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2380 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2381 set_curr_insn_location (false_edge->goto_locus);
2382 true_edge->flags |= EDGE_FALLTHRU;
2383 maybe_cleanup_end_of_block (true_edge, last);
2384 return NULL;
2387 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2388 true_edge->probability);
2389 last = get_last_insn ();
2390 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2391 set_curr_insn_location (false_edge->goto_locus);
2392 emit_jump (label_rtx_for_bb (false_edge->dest));
2394 BB_END (bb) = last;
2395 if (BARRIER_P (BB_END (bb)))
2396 BB_END (bb) = PREV_INSN (BB_END (bb));
2397 update_bb_for_insn (bb);
2399 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2400 dest = false_edge->dest;
2401 redirect_edge_succ (false_edge, new_bb);
2402 false_edge->flags |= EDGE_FALLTHRU;
2403 new_bb->count = false_edge->count;
2404 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2405 add_bb_to_loop (new_bb, bb->loop_father);
2406 new_edge = make_edge (new_bb, dest, 0);
2407 new_edge->probability = REG_BR_PROB_BASE;
2408 new_edge->count = new_bb->count;
2409 if (BARRIER_P (BB_END (new_bb)))
2410 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2411 update_bb_for_insn (new_bb);
2413 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2415 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2417 set_curr_insn_location (true_edge->goto_locus);
2418 true_edge->goto_locus = curr_insn_location ();
2421 return new_bb;
2424 /* Mark all calls that can have a transaction restart. */
2426 static void
2427 mark_transaction_restart_calls (gimple stmt)
2429 struct tm_restart_node dummy;
2430 tm_restart_node **slot;
2432 if (!cfun->gimple_df->tm_restart)
2433 return;
2435 dummy.stmt = stmt;
2436 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2437 if (slot)
2439 struct tm_restart_node *n = *slot;
2440 tree list = n->label_or_list;
2441 rtx_insn *insn;
2443 for (insn = next_real_insn (get_last_insn ());
2444 !CALL_P (insn);
2445 insn = next_real_insn (insn))
2446 continue;
2448 if (TREE_CODE (list) == LABEL_DECL)
2449 add_reg_note (insn, REG_TM, label_rtx (list));
2450 else
2451 for (; list ; list = TREE_CHAIN (list))
2452 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2456 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2457 statement STMT. */
2459 static void
2460 expand_call_stmt (gcall *stmt)
2462 tree exp, decl, lhs;
2463 bool builtin_p;
2464 size_t i;
2466 if (gimple_call_internal_p (stmt))
2468 expand_internal_call (stmt);
2469 return;
2472 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2474 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2475 decl = gimple_call_fndecl (stmt);
2476 builtin_p = decl && DECL_BUILT_IN (decl);
2478 /* If this is not a builtin function, the function type through which the
2479 call is made may be different from the type of the function. */
2480 if (!builtin_p)
2481 CALL_EXPR_FN (exp)
2482 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2483 CALL_EXPR_FN (exp));
2485 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2486 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2488 for (i = 0; i < gimple_call_num_args (stmt); i++)
2490 tree arg = gimple_call_arg (stmt, i);
2491 gimple def;
2492 /* TER addresses into arguments of builtin functions so we have a
2493 chance to infer more correct alignment information. See PR39954. */
2494 if (builtin_p
2495 && TREE_CODE (arg) == SSA_NAME
2496 && (def = get_gimple_for_ssa_name (arg))
2497 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2498 arg = gimple_assign_rhs1 (def);
2499 CALL_EXPR_ARG (exp, i) = arg;
2502 if (gimple_has_side_effects (stmt))
2503 TREE_SIDE_EFFECTS (exp) = 1;
2505 if (gimple_call_nothrow_p (stmt))
2506 TREE_NOTHROW (exp) = 1;
2508 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2509 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2510 if (decl
2511 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2512 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2513 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2514 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2515 else
2516 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2517 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2518 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2519 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2521 /* Ensure RTL is created for debug args. */
2522 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2524 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2525 unsigned int ix;
2526 tree dtemp;
2528 if (debug_args)
2529 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2531 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2532 expand_debug_expr (dtemp);
2536 lhs = gimple_call_lhs (stmt);
2537 if (lhs)
2538 expand_assignment (lhs, exp, false);
2539 else
2540 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2542 mark_transaction_restart_calls (stmt);
2546 /* Generate RTL for an asm statement (explicit assembler code).
2547 STRING is a STRING_CST node containing the assembler code text,
2548 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2549 insn is volatile; don't optimize it. */
2551 static void
2552 expand_asm_loc (tree string, int vol, location_t locus)
2554 rtx body;
2556 if (TREE_CODE (string) == ADDR_EXPR)
2557 string = TREE_OPERAND (string, 0);
2559 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2560 ggc_strdup (TREE_STRING_POINTER (string)),
2561 locus);
2563 MEM_VOLATILE_P (body) = vol;
2565 emit_insn (body);
2568 /* Return the number of times character C occurs in string S. */
2569 static int
2570 n_occurrences (int c, const char *s)
2572 int n = 0;
2573 while (*s)
2574 n += (*s++ == c);
2575 return n;
2578 /* A subroutine of expand_asm_operands. Check that all operands have
2579 the same number of alternatives. Return true if so. */
2581 static bool
2582 check_operand_nalternatives (const vec<const char *> &constraints)
2584 unsigned len = constraints.length();
2585 if (len > 0)
2587 int nalternatives = n_occurrences (',', constraints[0]);
2589 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2591 error ("too many alternatives in %<asm%>");
2592 return false;
2595 for (unsigned i = 1; i < len; ++i)
2596 if (n_occurrences (',', constraints[i]) != nalternatives)
2598 error ("operand constraints for %<asm%> differ "
2599 "in number of alternatives");
2600 return false;
2603 return true;
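/* For example, the constraint strings "=r,m" and "r,r" each contain one
   comma and so describe two alternatives, which passes the check above;
   pairing "=r,m" with a single-alternative "r" would be diagnosed.  */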
2606 /* Check for overlap between registers marked in CLOBBERED_REGS and
2607 anything inappropriate in T. Emit an error and return true if an
2608 overlap is found, false if everything is ok. */
2610 static bool
2611 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2613 /* Conflicts between asm-declared register variables and the clobber
2614 list are not allowed. */
2615 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2617 if (overlap)
2619 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2620 DECL_NAME (overlap));
2622 /* Reset registerness to stop multiple errors emitted for a single
2623 variable. */
2624 DECL_REGISTER (overlap) = 0;
2625 return true;
2628 return false;
2631 /* Generate RTL for an asm statement with arguments.
2632 STRING is the instruction template.
2633 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2634 Each output or input has an expression in the TREE_VALUE and
2635 a tree list in TREE_PURPOSE which in turn contains a constraint
2636 name in TREE_VALUE (or NULL_TREE) and a constraint string
2637 in TREE_PURPOSE.
2638 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2639 that is clobbered by this insn.
2641 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2642 should be the fallthru basic block of the asm goto.
2644 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2645 Some elements of OUTPUTS may be replaced with trees representing temporary
2646 values. The caller should copy those temporary values to the originally
2647 specified lvalues.
2649 VOL nonzero means the insn is volatile; don't optimize it. */
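/* As an illustrative sketch (the template and constraints here are
   arbitrary, not tied to any particular target), a statement such as

       asm volatile ("add %0, %1" : "=r" (x) : "r" (y) : "cc");

   arrives with one output whose constraint string is "=r", one input with
   constraint "r", and a clobber list consisting of the string "cc".  */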
2651 static void
2652 expand_asm_stmt (gasm *stmt)
2654 class save_input_location
2656 location_t old;
2658 public:
2659 explicit save_input_location(location_t where)
2661 old = input_location;
2662 input_location = where;
2665 ~save_input_location()
2667 input_location = old;
2671 location_t locus = gimple_location (stmt);
2673 if (gimple_asm_input_p (stmt))
2675 const char *s = gimple_asm_string (stmt);
2676 tree string = build_string (strlen (s), s);
2677 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2678 return;
2681 /* Setting the input location here covers some legacy diagnostics, and
2682 also avoids a sixth parameter to targetm.md_asm_adjust. */
2683 save_input_location s_i_l(locus);
2685 unsigned noutputs = gimple_asm_noutputs (stmt);
2686 unsigned ninputs = gimple_asm_ninputs (stmt);
2687 unsigned nlabels = gimple_asm_nlabels (stmt);
2688 unsigned i;
2690 /* ??? Diagnose during gimplification? */
2691 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2693 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2694 return;
2697 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2698 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2699 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2701 /* Copy the gimple vectors into new vectors that we can manipulate. */
2703 output_tvec.safe_grow (noutputs);
2704 input_tvec.safe_grow (ninputs);
2705 constraints.safe_grow (noutputs + ninputs);
2707 for (i = 0; i < noutputs; ++i)
2709 tree t = gimple_asm_output_op (stmt, i);
2710 output_tvec[i] = TREE_VALUE (t);
2711 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2713 for (i = 0; i < ninputs; i++)
2715 tree t = gimple_asm_input_op (stmt, i);
2716 input_tvec[i] = TREE_VALUE (t);
2717 constraints[i + noutputs]
2718 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2721 /* ??? Diagnose during gimplification? */
2722 if (! check_operand_nalternatives (constraints))
2723 return;
2725 /* Count the number of meaningful clobbered registers, ignoring what
2726 we would ignore later. */
2727 auto_vec<rtx> clobber_rvec;
2728 HARD_REG_SET clobbered_regs;
2729 CLEAR_HARD_REG_SET (clobbered_regs);
2731 if (unsigned n = gimple_asm_nclobbers (stmt))
2733 clobber_rvec.reserve (n);
2734 for (i = 0; i < n; i++)
2736 tree t = gimple_asm_clobber_op (stmt, i);
2737 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2738 int nregs, j;
2740 j = decode_reg_name_and_count (regname, &nregs);
2741 if (j < 0)
2743 if (j == -2)
2745 /* ??? Diagnose during gimplification? */
2746 error ("unknown register name %qs in %<asm%>", regname);
2748 else if (j == -4)
2750 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2751 clobber_rvec.safe_push (x);
2753 else
2755 /* Otherwise we should have -1 == empty string
2756 or -3 == cc, which is not a register. */
2757 gcc_assert (j == -1 || j == -3);
2760 else
2761 for (int reg = j; reg < j + nregs; reg++)
2763 /* Clobbering the PIC register is an error. */
2764 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2766 /* ??? Diagnose during gimplification? */
2767 error ("PIC register clobbered by %qs in %<asm%>",
2768 regname);
2769 return;
2772 SET_HARD_REG_BIT (clobbered_regs, reg);
2773 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2774 clobber_rvec.safe_push (x);
2778 unsigned nclobbers = clobber_rvec.length();
2780 /* First pass over inputs and outputs checks validity and sets
2781 mark_addressable if needed. */
2782 /* ??? Diagnose during gimplification? */
2784 for (i = 0; i < noutputs; ++i)
2786 tree val = output_tvec[i];
2787 tree type = TREE_TYPE (val);
2788 const char *constraint;
2789 bool is_inout;
2790 bool allows_reg;
2791 bool allows_mem;
2793 /* Try to parse the output constraint. If that fails, there's
2794 no point in going further. */
2795 constraint = constraints[i];
2796 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2797 &allows_mem, &allows_reg, &is_inout))
2798 return;
2800 if (! allows_reg
2801 && (allows_mem
2802 || is_inout
2803 || (DECL_P (val)
2804 && REG_P (DECL_RTL (val))
2805 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2806 mark_addressable (val);
2809 for (i = 0; i < ninputs; ++i)
2811 bool allows_reg, allows_mem;
2812 const char *constraint;
2814 constraint = constraints[i + noutputs];
2815 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
2816 constraints.address (),
2817 &allows_mem, &allows_reg))
2818 return;
2820 if (! allows_reg && allows_mem)
2821 mark_addressable (input_tvec[i]);
2824 /* Second pass evaluates arguments. */
2826 /* Make sure stack is consistent for asm goto. */
2827 if (nlabels > 0)
2828 do_pending_stack_adjust ();
2829 int old_generating_concat_p = generating_concat_p;
2831 /* Vector of RTX's of evaluated output operands. */
2832 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
2833 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
2834 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
2836 output_rvec.safe_grow (noutputs);
2838 for (i = 0; i < noutputs; ++i)
2840 tree val = output_tvec[i];
2841 tree type = TREE_TYPE (val);
2842 bool is_inout, allows_reg, allows_mem, ok;
2843 rtx op;
2845 ok = parse_output_constraint (&constraints[i], i, ninputs,
2846 noutputs, &allows_mem, &allows_reg,
2847 &is_inout);
2848 gcc_assert (ok);
2850 /* If an output operand is not a decl or indirect ref and our constraint
2851 allows a register, make a temporary to act as an intermediate.
2852 Make the asm insn write into that, then we will copy it to
2853 the real output operand. Likewise for promoted variables. */
2855 generating_concat_p = 0;
2857 if ((TREE_CODE (val) == INDIRECT_REF
2858 && allows_mem)
2859 || (DECL_P (val)
2860 && (allows_mem || REG_P (DECL_RTL (val)))
2861 && ! (REG_P (DECL_RTL (val))
2862 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2863 || ! allows_reg
2864 || is_inout)
2866 op = expand_expr (val, NULL_RTX, VOIDmode,
2867 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2868 if (MEM_P (op))
2869 op = validize_mem (op);
2871 if (! allows_reg && !MEM_P (op))
2872 error ("output number %d not directly addressable", i);
2873 if ((! allows_mem && MEM_P (op))
2874 || GET_CODE (op) == CONCAT)
2876 rtx old_op = op;
2877 op = gen_reg_rtx (GET_MODE (op));
2879 generating_concat_p = old_generating_concat_p;
2881 if (is_inout)
2882 emit_move_insn (op, old_op);
2884 push_to_sequence2 (after_rtl_seq, after_rtl_end);
2885 emit_move_insn (old_op, op);
2886 after_rtl_seq = get_insns ();
2887 after_rtl_end = get_last_insn ();
2888 end_sequence ();
2891 else
2893 op = assign_temp (type, 0, 1);
2894 op = validize_mem (op);
2895 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
2896 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
2898 generating_concat_p = old_generating_concat_p;
2900 push_to_sequence2 (after_rtl_seq, after_rtl_end);
2901 expand_assignment (val, make_tree (type, op), false);
2902 after_rtl_seq = get_insns ();
2903 after_rtl_end = get_last_insn ();
2904 end_sequence ();
2906 output_rvec[i] = op;
2908 if (is_inout)
2909 inout_opnum.safe_push (i);
2912 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
2913 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
2915 input_rvec.safe_grow (ninputs);
2916 input_mode.safe_grow (ninputs);
2918 generating_concat_p = 0;
2920 for (i = 0; i < ninputs; ++i)
2922 tree val = input_tvec[i];
2923 tree type = TREE_TYPE (val);
2924 bool allows_reg, allows_mem, ok;
2925 const char *constraint;
2926 rtx op;
2928 constraint = constraints[i + noutputs];
2929 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
2930 constraints.address (),
2931 &allows_mem, &allows_reg);
2932 gcc_assert (ok);
2934 /* EXPAND_INITIALIZER will not generate code for valid initializer
2935 constants, but will still generate code for other types of operand.
2936 This is the behavior we want for constant constraints. */
2937 op = expand_expr (val, NULL_RTX, VOIDmode,
2938 allows_reg ? EXPAND_NORMAL
2939 : allows_mem ? EXPAND_MEMORY
2940 : EXPAND_INITIALIZER);
2942 /* Never pass a CONCAT to an ASM. */
2943 if (GET_CODE (op) == CONCAT)
2944 op = force_reg (GET_MODE (op), op);
2945 else if (MEM_P (op))
2946 op = validize_mem (op);
2948 if (asm_operand_ok (op, constraint, NULL) <= 0)
2950 if (allows_reg && TYPE_MODE (type) != BLKmode)
2951 op = force_reg (TYPE_MODE (type), op);
2952 else if (!allows_mem)
2953 warning (0, "asm operand %d probably doesn%'t match constraints",
2954 i + noutputs);
2955 else if (MEM_P (op))
2957 /* We won't recognize either volatile memory or memory
2958 with a queued address as a valid memory_operand
2959 at this point. Ignore it: clearly this *is* a memory. */
2961 else
2962 gcc_unreachable ();
2964 input_rvec[i] = op;
2965 input_mode[i] = TYPE_MODE (type);
2968 /* For in-out operands, copy output rtx to input rtx. */
2969 unsigned ninout = inout_opnum.length();
2970 for (i = 0; i < ninout; i++)
2972 int j = inout_opnum[i];
2973 rtx o = output_rvec[j];
2975 input_rvec.safe_push (o);
2976 input_mode.safe_push (GET_MODE (o));
2978 char buffer[16];
2979 sprintf (buffer, "%d", j);
2980 constraints.safe_push (ggc_strdup (buffer));
2982 ninputs += ninout;
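/* For example, an operand written "+r" (x) in the source is parsed as an
   in-out output; the loop above re-feeds its rtx as an extra input whose
   constraint is just the output's number printed in decimal (e.g. "0" for
   the first output), which tells constraint matching that the input must
   use the same rtx as that output.  */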
2984 /* Sometimes we wish to automatically clobber registers across an asm.
2985 Case in point is when the i386 backend moved from cc0 to a hard reg --
2986 maintaining source-level compatibility means automatically clobbering
2987 the flags register. */
2988 rtx_insn *after_md_seq = NULL;
2989 if (targetm.md_asm_adjust)
2990 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
2991 constraints, clobber_rvec,
2992 clobbered_regs);
2994 /* Do not allow the hook to change the output and input count,
2995 lest it mess up the operand numbering. */
2996 gcc_assert (output_rvec.length() == noutputs);
2997 gcc_assert (input_rvec.length() == ninputs);
2998 gcc_assert (constraints.length() == noutputs + ninputs);
3000 /* But it certainly can adjust the clobbers. */
3001 nclobbers = clobber_rvec.length();
3003 /* Third pass checks for easy conflicts. */
3004 /* ??? Why are we doing this on trees instead of rtx? */
3006 bool clobber_conflict_found = 0;
3007 for (i = 0; i < noutputs; ++i)
3008 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3009 clobber_conflict_found = 1;
3010 for (i = 0; i < ninputs - ninout; ++i)
3011 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3012 clobber_conflict_found = 1;
3014 /* Make vectors for the expression-rtx, constraint strings,
3015 and named operands. */
3017 rtvec argvec = rtvec_alloc (ninputs);
3018 rtvec constraintvec = rtvec_alloc (ninputs);
3019 rtvec labelvec = rtvec_alloc (nlabels);
3021 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3022 : GET_MODE (output_rvec[0])),
3023 ggc_strdup (gimple_asm_string (stmt)),
3024 empty_string, 0, argvec, constraintvec,
3025 labelvec, locus);
3026 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3028 for (i = 0; i < ninputs; ++i)
3030 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3031 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3032 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3033 constraints[i + noutputs],
3034 locus);
3037 /* Copy labels to the vector. */
3038 rtx_code_label *fallthru_label = NULL;
3039 if (nlabels > 0)
3041 basic_block fallthru_bb = NULL;
3042 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3043 if (fallthru)
3044 fallthru_bb = fallthru->dest;
3046 for (i = 0; i < nlabels; ++i)
3048 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
3049 rtx_insn *r;
3050 /* If asm goto has any labels in the fallthru basic block, use
3051 a label that we emit immediately after the asm goto. Expansion
3052 may insert further instructions into the same basic block after
3053 asm goto and if we don't do this, insertion of instructions on
3054 the fallthru edge might misbehave. See PR58670. */
3055 if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3057 if (fallthru_label == NULL_RTX)
3058 fallthru_label = gen_label_rtx ();
3059 r = fallthru_label;
3061 else
3062 r = label_rtx (label);
3063 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3067 /* Now, for each output, construct an rtx
3068 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3069 ARGVEC CONSTRAINTS OPNAMES))
3070 If there is more than one, put them inside a PARALLEL. */
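/* For instance, with two outputs and one clobber the code below builds
   roughly

       (parallel [(set (reg:SI 100) (asm_operands ...))
                  (set (reg:SI 101) (asm_operands ...))
                  (clobber (reg:CC 17))])

   where the register numbers and modes are purely illustrative.  */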
3072 if (nlabels > 0 && nclobbers == 0)
3074 gcc_assert (noutputs == 0);
3075 emit_jump_insn (body);
3077 else if (noutputs == 0 && nclobbers == 0)
3079 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3080 emit_insn (body);
3082 else if (noutputs == 1 && nclobbers == 0)
3084 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3085 emit_insn (gen_rtx_SET (output_rvec[0], body));
3087 else
3089 rtx obody = body;
3090 int num = noutputs;
3092 if (num == 0)
3093 num = 1;
3095 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3097 /* For each output operand, store a SET. */
3098 for (i = 0; i < noutputs; ++i)
3100 rtx src, o = output_rvec[i];
3101 if (i == 0)
3103 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
3104 src = obody;
3106 else
3108 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3109 ASM_OPERANDS_TEMPLATE (obody),
3110 constraints[i], i, argvec,
3111 constraintvec, labelvec, locus);
3112 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
3114 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3117 /* If there are no outputs (but there are some clobbers)
3118 store the bare ASM_OPERANDS into the PARALLEL. */
3119 if (i == 0)
3120 XVECEXP (body, 0, i++) = obody;
3122 /* Store (clobber REG) for each clobbered register specified. */
3123 for (unsigned j = 0; j < nclobbers; ++j)
3125 rtx clobbered_reg = clobber_rvec[j];
3127 /* Do a sanity check for any overlap between clobbers and the inputs
3128 and outputs that hasn't been handled. Such overlap
3129 should have been detected and reported above. */
3130 if (!clobber_conflict_found && REG_P (clobbered_reg))
3132 /* We test the old body (obody) contents to avoid
3133 tripping over the under-construction body. */
3134 for (unsigned k = 0; k < noutputs; ++k)
3135 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3136 internal_error ("asm clobber conflict with output operand");
3138 for (unsigned k = 0; k < ninputs - ninout; ++k)
3139 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3140 internal_error ("asm clobber conflict with input operand");
3143 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
3146 if (nlabels > 0)
3147 emit_jump_insn (body);
3148 else
3149 emit_insn (body);
3152 generating_concat_p = old_generating_concat_p;
3154 if (fallthru_label)
3155 emit_label (fallthru_label);
3157 if (after_md_seq)
3158 emit_insn (after_md_seq);
3159 if (after_rtl_seq)
3160 emit_insn (after_rtl_seq);
3162 free_temp_slots ();
3163 crtl->has_asm_statement = 1;
3166 /* Emit code to jump to the address
3167 specified by the pointer expression EXP. */
3169 static void
3170 expand_computed_goto (tree exp)
3172 rtx x = expand_normal (exp);
3174 do_pending_stack_adjust ();
3175 emit_indirect_jump (x);
3178 /* Generate RTL code for a `goto' statement with target label LABEL.
3179 LABEL should be a LABEL_DECL tree node that was or will later be
3180 defined with `expand_label'. */
3182 static void
3183 expand_goto (tree label)
3185 #ifdef ENABLE_CHECKING
3186 /* Check for a nonlocal goto to a containing function. Should have
3187 gotten translated to __builtin_nonlocal_goto. */
3188 tree context = decl_function_context (label);
3189 gcc_assert (!context || context == current_function_decl);
3190 #endif
3192 emit_jump (jump_target_rtx (label));
3195 /* Output a return with no value. */
3197 static void
3198 expand_null_return_1 (void)
3200 clear_pending_stack_adjust ();
3201 do_pending_stack_adjust ();
3202 emit_jump (return_label);
3205 /* Generate RTL to return from the current function, with no value.
3206 (That is, we do not do anything about returning any value.) */
3208 void
3209 expand_null_return (void)
3211 /* If this function was declared to return a value, but we
3212 didn't, clobber the return registers so that they are not
3213 propagated live to the rest of the function. */
3214 clobber_return_register ();
3216 expand_null_return_1 ();
3219 /* Generate RTL to return from the current function, with value VAL. */
3221 static void
3222 expand_value_return (rtx val)
3224 /* Copy the value to the return location unless it's already there. */
3226 tree decl = DECL_RESULT (current_function_decl);
3227 rtx return_reg = DECL_RTL (decl);
3228 if (return_reg != val)
3230 tree funtype = TREE_TYPE (current_function_decl);
3231 tree type = TREE_TYPE (decl);
3232 int unsignedp = TYPE_UNSIGNED (type);
3233 machine_mode old_mode = DECL_MODE (decl);
3234 machine_mode mode;
3235 if (DECL_BY_REFERENCE (decl))
3236 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3237 else
3238 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3240 if (mode != old_mode)
3241 val = convert_modes (mode, old_mode, val, unsignedp);
3243 if (GET_CODE (return_reg) == PARALLEL)
3244 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3245 else
3246 emit_move_insn (return_reg, val);
3249 expand_null_return_1 ();
3252 /* Generate RTL to evaluate the expression RETVAL and return it
3253 from the current function. */
3255 static void
3256 expand_return (tree retval, tree bounds)
3258 rtx result_rtl;
3259 rtx val = 0;
3260 tree retval_rhs;
3261 rtx bounds_rtl;
3263 /* If function wants no value, give it none. */
3264 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3266 expand_normal (retval);
3267 expand_null_return ();
3268 return;
3271 if (retval == error_mark_node)
3273 /* Treat this like a return of no value from a function that
3274 returns a value. */
3275 expand_null_return ();
3276 return;
3278 else if ((TREE_CODE (retval) == MODIFY_EXPR
3279 || TREE_CODE (retval) == INIT_EXPR)
3280 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3281 retval_rhs = TREE_OPERAND (retval, 1);
3282 else
3283 retval_rhs = retval;
3285 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3287 /* Put returned bounds to the right place. */
3288 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3289 if (bounds_rtl)
3291 rtx addr = NULL;
3292 rtx bnd = NULL;
3294 if (bounds && bounds != error_mark_node)
3296 bnd = expand_normal (bounds);
3297 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3299 else if (REG_P (bounds_rtl))
3301 if (bounds)
3302 bnd = chkp_expand_zero_bounds ();
3303 else
3305 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3306 addr = gen_rtx_MEM (Pmode, addr);
3307 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3310 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3312 else
3314 int n;
3316 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3318 if (bounds)
3319 bnd = chkp_expand_zero_bounds ();
3320 else
3322 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3323 addr = gen_rtx_MEM (Pmode, addr);
3326 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3328 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3329 if (!bounds)
3331 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3332 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3333 bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3335 targetm.calls.store_returned_bounds (slot, bnd);
3339 else if (chkp_function_instrumented_p (current_function_decl)
3340 && !BOUNDED_P (retval_rhs)
3341 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3342 && TREE_CODE (retval_rhs) != RESULT_DECL)
3344 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3345 addr = gen_rtx_MEM (Pmode, addr);
3347 gcc_assert (MEM_P (result_rtl));
3349 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3352 /* If we are returning the RESULT_DECL, then the value has already
3353 been stored into it, so we don't have to do anything special. */
3354 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3355 expand_value_return (result_rtl);
3357 /* If the result is an aggregate that is being returned in one (or more)
3358 registers, load the registers here. */
3360 else if (retval_rhs != 0
3361 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3362 && REG_P (result_rtl))
3364 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3365 if (val)
3367 /* Use the mode of the result value on the return register. */
3368 PUT_MODE (result_rtl, GET_MODE (val));
3369 expand_value_return (val);
3371 else
3372 expand_null_return ();
3374 else if (retval_rhs != 0
3375 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3376 && (REG_P (result_rtl)
3377 || (GET_CODE (result_rtl) == PARALLEL)))
3379 /* Compute the return value into a temporary (usually a pseudo reg). */
3380 val
3381 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3382 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3383 val = force_not_mem (val);
3384 expand_value_return (val);
3386 else
3388 /* No hard reg used; calculate value into hard return reg. */
3389 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3390 expand_value_return (result_rtl);
3394 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3395 STMT that doesn't require special handling for outgoing edges. That
3396 is no tailcalls and no GIMPLE_COND. */
3398 static void
3399 expand_gimple_stmt_1 (gimple stmt)
3401 tree op0;
3403 set_curr_insn_location (gimple_location (stmt));
3405 switch (gimple_code (stmt))
3407 case GIMPLE_GOTO:
3408 op0 = gimple_goto_dest (stmt);
3409 if (TREE_CODE (op0) == LABEL_DECL)
3410 expand_goto (op0);
3411 else
3412 expand_computed_goto (op0);
3413 break;
3414 case GIMPLE_LABEL:
3415 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3416 break;
3417 case GIMPLE_NOP:
3418 case GIMPLE_PREDICT:
3419 break;
3420 case GIMPLE_SWITCH:
3421 expand_case (as_a <gswitch *> (stmt));
3422 break;
3423 case GIMPLE_ASM:
3424 expand_asm_stmt (as_a <gasm *> (stmt));
3425 break;
3426 case GIMPLE_CALL:
3427 expand_call_stmt (as_a <gcall *> (stmt));
3428 break;
3430 case GIMPLE_RETURN:
3432 tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3433 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3435 if (op0 && op0 != error_mark_node)
3437 tree result = DECL_RESULT (current_function_decl);
3439 /* If we are not returning the current function's RESULT_DECL,
3440 build an assignment to it. */
3441 if (op0 != result)
3443 /* I believe that a function's RESULT_DECL is unique. */
3444 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3446 /* ??? We'd like to use simply expand_assignment here,
3447 but this fails if the value is of BLKmode but the return
3448 decl is a register. expand_return has special handling
3449 for this combination, which eventually should move
3450 to common code. See comments there. Until then, let's
3451 build a modify expression :-/ */
3452 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3453 result, op0);
3455 /* Mark that we have a return statement with missing bounds. */
3456 if (!bnd && chkp_function_instrumented_p (cfun->decl))
3457 bnd = error_mark_node;
3460 if (!op0)
3461 expand_null_return ();
3462 else
3463 expand_return (op0, bnd);
3465 break;
3467 case GIMPLE_ASSIGN:
3469 gassign *assign_stmt = as_a <gassign *> (stmt);
3470 tree lhs = gimple_assign_lhs (assign_stmt);
3472 /* Tree expand used to fiddle with |= and &= of two bitfield
3473 COMPONENT_REFs here. This can't happen with gimple; the LHS
3474 of binary assigns must be a gimple reg. */
3476 if (TREE_CODE (lhs) != SSA_NAME
3477 || get_gimple_rhs_class (gimple_expr_code (stmt))
3478 == GIMPLE_SINGLE_RHS)
3480 tree rhs = gimple_assign_rhs1 (assign_stmt);
3481 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3482 == GIMPLE_SINGLE_RHS);
3483 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3484 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3485 if (TREE_CLOBBER_P (rhs))
3486 /* This is a clobber marking the point at which this LHS goes
3487 out of scope. */
3489 else
3490 expand_assignment (lhs, rhs,
3491 gimple_assign_nontemporal_move_p (
3492 assign_stmt));
3494 else
3496 rtx target, temp;
3497 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3498 struct separate_ops ops;
3499 bool promoted = false;
3501 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3502 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3503 promoted = true;
3505 ops.code = gimple_assign_rhs_code (assign_stmt);
3506 ops.type = TREE_TYPE (lhs);
3507 switch (get_gimple_rhs_class (ops.code))
3509 case GIMPLE_TERNARY_RHS:
3510 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3511 /* Fallthru */
3512 case GIMPLE_BINARY_RHS:
3513 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3514 /* Fallthru */
3515 case GIMPLE_UNARY_RHS:
3516 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3517 break;
3518 default:
3519 gcc_unreachable ();
3521 ops.location = gimple_location (stmt);
3523 /* If we want to use a nontemporal store, force the value to
3524 a register first. If we store into a promoted register,
3525 don't directly expand to target. */
3526 temp = nontemporal || promoted ? NULL_RTX : target;
3527 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3528 EXPAND_NORMAL);
3530 if (temp == target)
3532 else if (promoted)
3534 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3535 /* If TEMP is a VOIDmode constant, use convert_modes to make
3536 sure that we properly convert it. */
3537 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3539 temp = convert_modes (GET_MODE (target),
3540 TYPE_MODE (ops.type),
3541 temp, unsignedp);
3542 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3543 GET_MODE (target), temp, unsignedp);
3546 convert_move (SUBREG_REG (target), temp, unsignedp);
3548 else if (nontemporal && emit_storent_insn (target, temp))
3550 else
3552 temp = force_operand (temp, target);
3553 if (temp != target)
3554 emit_move_insn (target, temp);
3558 break;
3560 default:
3561 gcc_unreachable ();
3565 /* Expand one gimple statement STMT and return the last RTL instruction
3566 before any of the newly generated ones.
3568 In addition to generating the necessary RTL instructions this also
3569 sets REG_EH_REGION notes if necessary and sets the current source
3570 location for diagnostics. */
3572 static rtx_insn *
3573 expand_gimple_stmt (gimple stmt)
3575 location_t saved_location = input_location;
3576 rtx_insn *last = get_last_insn ();
3577 int lp_nr;
3579 gcc_assert (cfun);
3581 /* We need to save and restore the current source location so that errors
3582 discovered during expansion are emitted with the right location. But
3583 it would be better if the diagnostic routines used the source location
3584 embedded in the tree nodes rather than globals. */
3585 if (gimple_has_location (stmt))
3586 input_location = gimple_location (stmt);
3588 expand_gimple_stmt_1 (stmt);
3590 /* Free any temporaries used to evaluate this statement. */
3591 free_temp_slots ();
3593 input_location = saved_location;
3595 /* Mark all insns that may trap. */
3596 lp_nr = lookup_stmt_eh_lp (stmt);
3597 if (lp_nr)
3599 rtx_insn *insn;
3600 for (insn = next_real_insn (last); insn;
3601 insn = next_real_insn (insn))
3603 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3604 /* If we want exceptions for non-call insns, any
3605 may_trap_p instruction may throw. */
3606 && GET_CODE (PATTERN (insn)) != CLOBBER
3607 && GET_CODE (PATTERN (insn)) != USE
3608 && insn_could_throw_p (insn))
3609 make_reg_eh_region_note (insn, 0, lp_nr);
3613 return last;
3616 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3617 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3618 generated a tail call (something that might be denied by the ABI
3619 rules governing the call; see calls.c).
3621 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3622 can still reach the rest of BB. The case here is __builtin_sqrt,
3623 where the NaN result goes through the external function (with a
3624 tailcall) and the normal result happens via a sqrt instruction. */
3626 static basic_block
3627 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3629 rtx_insn *last2, *last;
3630 edge e;
3631 edge_iterator ei;
3632 int probability;
3633 gcov_type count;
3635 last2 = last = expand_gimple_stmt (stmt);
3637 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3638 if (CALL_P (last) && SIBLING_CALL_P (last))
3639 goto found;
3641 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3643 *can_fallthru = true;
3644 return NULL;
3646 found:
3647 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3648 Any instructions emitted here are about to be deleted. */
3649 do_pending_stack_adjust ();
3651 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3652 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3653 EH or abnormal edges, we shouldn't have created a tail call in
3654 the first place. So it seems to me we should just be removing
3655 all edges here, or redirecting the existing fallthru edge to
3656 the exit block. */
3658 probability = 0;
3659 count = 0;
3661 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3663 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3665 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3667 e->dest->count -= e->count;
3668 e->dest->frequency -= EDGE_FREQUENCY (e);
3669 if (e->dest->count < 0)
3670 e->dest->count = 0;
3671 if (e->dest->frequency < 0)
3672 e->dest->frequency = 0;
3674 count += e->count;
3675 probability += e->probability;
3676 remove_edge (e);
3678 else
3679 ei_next (&ei);
3682 /* This is somewhat ugly: the call_expr expander often emits instructions
3683 after the sibcall (to perform the function return). These confuse the
3684 find_many_sub_basic_blocks code, so we need to get rid of these. */
3685 last = NEXT_INSN (last);
3686 gcc_assert (BARRIER_P (last));
3688 *can_fallthru = false;
3689 while (NEXT_INSN (last))
3691 /* For instance a sqrt builtin expander expands an if with a
3692 sibcall in the then-arm and a label for the else-arm. */
3693 if (LABEL_P (NEXT_INSN (last)))
3695 *can_fallthru = true;
3696 break;
3698 delete_insn (NEXT_INSN (last));
3701 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3702 | EDGE_SIBCALL);
3703 e->probability += probability;
3704 e->count += count;
3705 BB_END (bb) = last;
3706 update_bb_for_insn (bb);
3708 if (NEXT_INSN (last))
3710 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3712 last = BB_END (bb);
3713 if (BARRIER_P (last))
3714 BB_END (bb) = PREV_INSN (last);
3717 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3719 return bb;
3722 /* Return the difference between the floor and the truncated result of
3723 a signed division by OP1 with remainder MOD. */
3724 static rtx
3725 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3727 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3728 return gen_rtx_IF_THEN_ELSE
3729 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3730 gen_rtx_IF_THEN_ELSE
3731 (mode, gen_rtx_LT (BImode,
3732 gen_rtx_DIV (mode, op1, mod),
3733 const0_rtx),
3734 constm1_rtx, const0_rtx),
3735 const0_rtx);
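/* A worked example of the adjustment above: -7 / 2 truncates to -3 with
   remainder -1; op1 / mod = 2 / -1 is negative, so the adjustment is -1 and
   the floor result is -4.  When the remainder and the divisor have the same
   sign, no adjustment is needed.  */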
3738 /* Return the difference between the ceil and the truncated result of
3739 a signed division by OP1 with remainder MOD. */
3740 static rtx
3741 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3743 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3744 return gen_rtx_IF_THEN_ELSE
3745 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3746 gen_rtx_IF_THEN_ELSE
3747 (mode, gen_rtx_GT (BImode,
3748 gen_rtx_DIV (mode, op1, mod),
3749 const0_rtx),
3750 const1_rtx, const0_rtx),
3751 const0_rtx);
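/* A worked example: 7 / 2 truncates to 3 with remainder 1; op1 / mod = 2 is
   positive, so the adjustment is +1 and the ceiling result is 4.  For -7 / 2
   the remainder and divisor differ in sign, so no adjustment is made.  */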
3754 /* Return the difference between the ceil and the truncated result of
3755 an unsigned division by OP1 with remainder MOD. */
3756 static rtx
3757 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3759 /* (mod != 0 ? 1 : 0) */
3760 return gen_rtx_IF_THEN_ELSE
3761 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3762 const1_rtx, const0_rtx);
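/* A worked example: 7 / 2 truncates to 3 with remainder 1, so the adjustment
   is +1 and the ceiling result is 4; a zero remainder needs no adjustment.  */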
3765 /* Return the difference between the rounded and the truncated result
3766 of a signed division by OP1 with remainder MOD. Halfway cases are
3767 rounded away from zero, rather than to the nearest even number. */
3768 static rtx
3769 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3771 /* (abs (mod) >= abs (op1) - abs (mod)
3772 ? (op1 / mod > 0 ? 1 : -1)
3773 : 0) */
3774 return gen_rtx_IF_THEN_ELSE
3775 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3776 gen_rtx_MINUS (mode,
3777 gen_rtx_ABS (mode, op1),
3778 gen_rtx_ABS (mode, mod))),
3779 gen_rtx_IF_THEN_ELSE
3780 (mode, gen_rtx_GT (BImode,
3781 gen_rtx_DIV (mode, op1, mod),
3782 const0_rtx),
3783 const1_rtx, constm1_rtx),
3784 const0_rtx);
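/* A worked example: -7 / 4 truncates to -1 with remainder -3; abs (mod) = 3
   is at least abs (op1) - abs (mod) = 1 and op1 / mod is negative, so the
   adjustment is -1 and the rounded result is -2.  */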
3787 /* Return the difference between the rounded and the truncated result
3788 of an unsigned division by OP1 with remainder MOD. Halfway cases
3789 are rounded away from zero, rather than to the nearest even
3790 number. */
3791 static rtx
3792 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3794 /* (mod >= op1 - mod ? 1 : 0) */
3795 return gen_rtx_IF_THEN_ELSE
3796 (mode, gen_rtx_GE (BImode, mod,
3797 gen_rtx_MINUS (mode, op1, mod)),
3798 const1_rtx, const0_rtx);
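/* A worked example: 7 / 4 truncates to 1 with remainder 3; since 3 >= 4 - 3
   the adjustment is +1 and the rounded result is 2, whereas 5 / 4 (remainder
   1 < 3) is left at 1.  */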
3801 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
3802 any rtl. */
3804 static rtx
3805 convert_debug_memory_address (machine_mode mode, rtx x,
3806 addr_space_t as)
3808 machine_mode xmode = GET_MODE (x);
3810 #ifndef POINTERS_EXTEND_UNSIGNED
3811 gcc_assert (mode == Pmode
3812 || mode == targetm.addr_space.address_mode (as));
3813 gcc_assert (xmode == mode || xmode == VOIDmode);
3814 #else
3815 rtx temp;
3817 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3819 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3820 return x;
3822 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3823 x = simplify_gen_subreg (mode, x, xmode,
3824 subreg_lowpart_offset
3825 (mode, xmode));
3826 else if (POINTERS_EXTEND_UNSIGNED > 0)
3827 x = gen_rtx_ZERO_EXTEND (mode, x);
3828 else if (!POINTERS_EXTEND_UNSIGNED)
3829 x = gen_rtx_SIGN_EXTEND (mode, x);
3830 else
3832 switch (GET_CODE (x))
3834 case SUBREG:
3835 if ((SUBREG_PROMOTED_VAR_P (x)
3836 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3837 || (GET_CODE (SUBREG_REG (x)) == PLUS
3838 && REG_P (XEXP (SUBREG_REG (x), 0))
3839 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3840 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3841 && GET_MODE (SUBREG_REG (x)) == mode)
3842 return SUBREG_REG (x);
3843 break;
3844 case LABEL_REF:
3845 temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
3846 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3847 return temp;
3848 case SYMBOL_REF:
3849 temp = shallow_copy_rtx (x);
3850 PUT_MODE (temp, mode);
3851 return temp;
3852 case CONST:
3853 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3854 if (temp)
3855 temp = gen_rtx_CONST (mode, temp);
3856 return temp;
3857 case PLUS:
3858 case MINUS:
3859 if (CONST_INT_P (XEXP (x, 1)))
3861 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3862 if (temp)
3863 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3865 break;
3866 default:
3867 break;
3869 /* Don't know how to express ptr_extend as an operation in debug info. */
3870 return NULL;
3872 #endif /* POINTERS_EXTEND_UNSIGNED */
3874 return x;
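/* Minimal sketch, not GCC code: the widening choice made above when
   POINTERS_EXTEND_UNSIGNED is defined, assuming debug addresses widened
   from 32 to 64 bits.  The narrowing (lowpart subreg) case and the
   ptr_extend special cases are not shown.  */

static unsigned long long
widen_debug_address_sketch (unsigned int addr32, int pointers_extend_unsigned)
{
  if (pointers_extend_unsigned)
    /* ZERO_EXTEND case.  */
    return (unsigned long long) addr32;
  /* SIGN_EXTEND case.  */
  return (unsigned long long) (long long) (int) addr32;
}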
3877 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
3878 by avoid_deep_ter_for_debug. */
3880 static hash_map<tree, tree> *deep_ter_debug_map;
3882 /* Split overly deep TER chains for debug stmts using debug temporaries. */
3884 static void
3885 avoid_deep_ter_for_debug (gimple stmt, int depth)
3887 use_operand_p use_p;
3888 ssa_op_iter iter;
3889 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
3891 tree use = USE_FROM_PTR (use_p);
3892 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
3893 continue;
3894 gimple g = get_gimple_for_ssa_name (use);
3895 if (g == NULL)
3896 continue;
3897 if (depth > 6 && !stmt_ends_bb_p (g))
3899 if (deep_ter_debug_map == NULL)
3900 deep_ter_debug_map = new hash_map<tree, tree>;
3902 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
3903 if (vexpr != NULL)
3904 continue;
3905 vexpr = make_node (DEBUG_EXPR_DECL);
3906 gimple def_temp = gimple_build_debug_bind (vexpr, use, g);
3907 DECL_ARTIFICIAL (vexpr) = 1;
3908 TREE_TYPE (vexpr) = TREE_TYPE (use);
3909 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
3910 gimple_stmt_iterator gsi = gsi_for_stmt (g);
3911 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
3912 avoid_deep_ter_for_debug (def_temp, 0);
3914 else
3915 avoid_deep_ter_for_debug (g, depth + 1);
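/* Toy sketch of the recursion above, not GIMPLE: follow single-use
   defining statements, and once a chain grows deeper than 6, give the
   operand a name (a debug temporary bound right after its definition in
   the real code, which also requires that the definition not end its
   basic block) and restart the depth count below it.  The struct and
   field names here are invented for illustration only.  */

struct toy_def
{
  struct toy_def *operand_def;	/* TERed definition feeding this one.  */
  int bound_to_temp;		/* A temporary was introduced here.  */
};

static void
split_deep_chain_sketch (struct toy_def *d, int depth)
{
  if (d == NULL || d->operand_def == NULL)
    return;
  if (depth > 6)
    {
      /* Past the threshold: name the operand and restart counting.  */
      d->operand_def->bound_to_temp = 1;
      split_deep_chain_sketch (d->operand_def, 0);
    }
  else
    split_deep_chain_sketch (d->operand_def, depth + 1);
}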
3919 /* Return an RTX equivalent to the value of the parameter DECL. */
3921 static rtx
3922 expand_debug_parm_decl (tree decl)
3924 rtx incoming = DECL_INCOMING_RTL (decl);
3926 if (incoming
3927 && GET_MODE (incoming) != BLKmode
3928 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3929 || (MEM_P (incoming)
3930 && REG_P (XEXP (incoming, 0))
3931 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3933 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3935 #ifdef HAVE_window_save
3936 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3937 If the target machine has an explicit window save instruction, the
3938 actual entry value is the corresponding OUTGOING_REGNO instead. */
3939 if (REG_P (incoming)
3940 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3941 incoming
3942 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3943 OUTGOING_REGNO (REGNO (incoming)), 0);
3944 else if (MEM_P (incoming))
3946 rtx reg = XEXP (incoming, 0);
3947 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3949 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3950 incoming = replace_equiv_address_nv (incoming, reg);
3952 else
3953 incoming = copy_rtx (incoming);
3955 #endif
3957 ENTRY_VALUE_EXP (rtl) = incoming;
3958 return rtl;
3961 if (incoming
3962 && GET_MODE (incoming) != BLKmode
3963 && !TREE_ADDRESSABLE (decl)
3964 && MEM_P (incoming)
3965 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3966 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3967 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3968 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3969 return copy_rtx (incoming);
3971 return NULL_RTX;
3974 /* Return an RTX equivalent to the value of the tree expression EXP. */
3976 static rtx
3977 expand_debug_expr (tree exp)
3979 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3980 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3981 machine_mode inner_mode = VOIDmode;
3982 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3983 addr_space_t as;
3985 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3987 case tcc_expression:
3988 switch (TREE_CODE (exp))
3990 case COND_EXPR:
3991 case DOT_PROD_EXPR:
3992 case SAD_EXPR:
3993 case WIDEN_MULT_PLUS_EXPR:
3994 case WIDEN_MULT_MINUS_EXPR:
3995 case FMA_EXPR:
3996 goto ternary;
3998 case TRUTH_ANDIF_EXPR:
3999 case TRUTH_ORIF_EXPR:
4000 case TRUTH_AND_EXPR:
4001 case TRUTH_OR_EXPR:
4002 case TRUTH_XOR_EXPR:
4003 goto binary;
4005 case TRUTH_NOT_EXPR:
4006 goto unary;
4008 default:
4009 break;
4011 break;
4013 ternary:
4014 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4015 if (!op2)
4016 return NULL_RTX;
4017 /* Fall through. */
4019 binary:
4020 case tcc_binary:
4021 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4022 if (!op1)
4023 return NULL_RTX;
4024 switch (TREE_CODE (exp))
4026 case LSHIFT_EXPR:
4027 case RSHIFT_EXPR:
4028 case LROTATE_EXPR:
4029 case RROTATE_EXPR:
4030 case WIDEN_LSHIFT_EXPR:
4031 /* Ensure second operand isn't wider than the first one. */
4032 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4033 if (SCALAR_INT_MODE_P (inner_mode))
4035 machine_mode opmode = mode;
4036 if (VECTOR_MODE_P (mode))
4037 opmode = GET_MODE_INNER (mode);
4038 if (SCALAR_INT_MODE_P (opmode)
4039 && (GET_MODE_PRECISION (opmode)
4040 < GET_MODE_PRECISION (inner_mode)))
4041 op1 = simplify_gen_subreg (opmode, op1, inner_mode,
4042 subreg_lowpart_offset (opmode,
4043 inner_mode));
4045 break;
4046 default:
4047 break;
4049 /* Fall through. */
4051 unary:
4052 case tcc_unary:
4053 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4054 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4055 if (!op0)
4056 return NULL_RTX;
4057 break;
4059 case tcc_comparison:
4060 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4061 goto binary;
4063 case tcc_type:
4064 case tcc_statement:
4065 gcc_unreachable ();
4067 case tcc_constant:
4068 case tcc_exceptional:
4069 case tcc_declaration:
4070 case tcc_reference:
4071 case tcc_vl_exp:
4072 break;
4075 switch (TREE_CODE (exp))
4077 case STRING_CST:
4078 if (!lookup_constant_def (exp))
4080 if (strlen (TREE_STRING_POINTER (exp)) + 1
4081 != (size_t) TREE_STRING_LENGTH (exp))
4082 return NULL_RTX;
4083 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4084 op0 = gen_rtx_MEM (BLKmode, op0);
4085 set_mem_attributes (op0, exp, 0);
4086 return op0;
4088 /* Fall through... */
4090 case INTEGER_CST:
4091 case REAL_CST:
4092 case FIXED_CST:
4093 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4094 return op0;
4096 case COMPLEX_CST:
4097 gcc_assert (COMPLEX_MODE_P (mode));
4098 op0 = expand_debug_expr (TREE_REALPART (exp));
4099 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4100 return gen_rtx_CONCAT (mode, op0, op1);
4102 case DEBUG_EXPR_DECL:
4103 op0 = DECL_RTL_IF_SET (exp);
4105 if (op0)
4106 return op0;
4108 op0 = gen_rtx_DEBUG_EXPR (mode);
4109 DEBUG_EXPR_TREE_DECL (op0) = exp;
4110 SET_DECL_RTL (exp, op0);
4112 return op0;
4114 case VAR_DECL:
4115 case PARM_DECL:
4116 case FUNCTION_DECL:
4117 case LABEL_DECL:
4118 case CONST_DECL:
4119 case RESULT_DECL:
4120 op0 = DECL_RTL_IF_SET (exp);
4122 /* This decl was probably optimized away. */
4123 if (!op0)
4125 if (TREE_CODE (exp) != VAR_DECL
4126 || DECL_EXTERNAL (exp)
4127 || !TREE_STATIC (exp)
4128 || !DECL_NAME (exp)
4129 || DECL_HARD_REGISTER (exp)
4130 || DECL_IN_CONSTANT_POOL (exp)
4131 || mode == VOIDmode)
4132 return NULL;
4134 op0 = make_decl_rtl_for_debug (exp);
4135 if (!MEM_P (op0)
4136 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4137 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
4138 return NULL;
4140 else
4141 op0 = copy_rtx (op0);
4143 if (GET_MODE (op0) == BLKmode
4144 /* If op0 is not BLKmode, but mode is, adjust_mode
4145 below would ICE. While it is likely a FE bug,
4146 try to be robust here. See PR43166. */
4147 || mode == BLKmode
4148 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4150 gcc_assert (MEM_P (op0));
4151 op0 = adjust_address_nv (op0, mode, 0);
4152 return op0;
4155 /* Fall through. */
4157 adjust_mode:
4158 case PAREN_EXPR:
4159 CASE_CONVERT:
4161 inner_mode = GET_MODE (op0);
4163 if (mode == inner_mode)
4164 return op0;
4166 if (inner_mode == VOIDmode)
4168 if (TREE_CODE (exp) == SSA_NAME)
4169 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4170 else
4171 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4172 if (mode == inner_mode)
4173 return op0;
4176 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4178 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4179 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4180 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4181 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4182 else
4183 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4185 else if (FLOAT_MODE_P (mode))
4187 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4188 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4189 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4190 else
4191 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4193 else if (FLOAT_MODE_P (inner_mode))
4195 if (unsignedp)
4196 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4197 else
4198 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4200 else if (CONSTANT_P (op0)
4201 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
4202 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4203 subreg_lowpart_offset (mode,
4204 inner_mode));
4205 else if (UNARY_CLASS_P (exp)
4206 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4207 : unsignedp)
4208 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4209 else
4210 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4212 return op0;
4215 case MEM_REF:
4216 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4218 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4219 TREE_OPERAND (exp, 0),
4220 TREE_OPERAND (exp, 1));
4221 if (newexp)
4222 return expand_debug_expr (newexp);
4224 /* FALLTHROUGH */
4225 case INDIRECT_REF:
4226 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4227 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4228 if (!op0)
4229 return NULL;
4231 if (TREE_CODE (exp) == MEM_REF)
4233 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4234 || (GET_CODE (op0) == PLUS
4235 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4236 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4237 Instead just use get_inner_reference. */
4238 goto component_ref;
4240 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4241 if (!op1 || !CONST_INT_P (op1))
4242 return NULL;
4244 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4247 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4249 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4250 op0, as);
4251 if (op0 == NULL_RTX)
4252 return NULL;
4254 op0 = gen_rtx_MEM (mode, op0);
4255 set_mem_attributes (op0, exp, 0);
4256 if (TREE_CODE (exp) == MEM_REF
4257 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4258 set_mem_expr (op0, NULL_TREE);
4259 set_mem_addr_space (op0, as);
4261 return op0;
4263 case TARGET_MEM_REF:
4264 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4265 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4266 return NULL;
4268 op0 = expand_debug_expr
4269 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4270 if (!op0)
4271 return NULL;
4273 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4274 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4275 op0, as);
4276 if (op0 == NULL_RTX)
4277 return NULL;
4279 op0 = gen_rtx_MEM (mode, op0);
4281 set_mem_attributes (op0, exp, 0);
4282 set_mem_addr_space (op0, as);
4284 return op0;
4286 component_ref:
4287 case ARRAY_REF:
4288 case ARRAY_RANGE_REF:
4289 case COMPONENT_REF:
4290 case BIT_FIELD_REF:
4291 case REALPART_EXPR:
4292 case IMAGPART_EXPR:
4293 case VIEW_CONVERT_EXPR:
4295 machine_mode mode1;
4296 HOST_WIDE_INT bitsize, bitpos;
4297 tree offset;
4298 int volatilep = 0;
4299 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4300 &mode1, &unsignedp, &volatilep, false);
4301 rtx orig_op0;
4303 if (bitsize == 0)
4304 return NULL;
4306 orig_op0 = op0 = expand_debug_expr (tem);
4308 if (!op0)
4309 return NULL;
4311 if (offset)
4313 machine_mode addrmode, offmode;
4315 if (!MEM_P (op0))
4316 return NULL;
4318 op0 = XEXP (op0, 0);
4319 addrmode = GET_MODE (op0);
4320 if (addrmode == VOIDmode)
4321 addrmode = Pmode;
4323 op1 = expand_debug_expr (offset);
4324 if (!op1)
4325 return NULL;
4327 offmode = GET_MODE (op1);
4328 if (offmode == VOIDmode)
4329 offmode = TYPE_MODE (TREE_TYPE (offset));
4331 if (addrmode != offmode)
4332 op1 = simplify_gen_subreg (addrmode, op1, offmode,
4333 subreg_lowpart_offset (addrmode,
4334 offmode));
4336 /* Don't use offset_address here; we don't need a
4337 recognizable address, and we don't want to generate
4338 code. */
4339 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4340 op0, op1));
4343 if (MEM_P (op0))
4345 if (mode1 == VOIDmode)
4346 /* Bitfield. */
4347 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4348 if (bitpos >= BITS_PER_UNIT)
4350 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4351 bitpos %= BITS_PER_UNIT;
4353 else if (bitpos < 0)
4355 HOST_WIDE_INT units
4356 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4357 op0 = adjust_address_nv (op0, mode1, units);
4358 bitpos += units * BITS_PER_UNIT;
4360 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4361 op0 = adjust_address_nv (op0, mode, 0);
4362 else if (GET_MODE (op0) != mode1)
4363 op0 = adjust_address_nv (op0, mode1, 0);
4364 else
4365 op0 = copy_rtx (op0);
4366 if (op0 == orig_op0)
4367 op0 = shallow_copy_rtx (op0);
4368 set_mem_attributes (op0, exp, 0);
4371 if (bitpos == 0 && mode == GET_MODE (op0))
4372 return op0;
4374 if (bitpos < 0)
4375 return NULL;
4377 if (GET_MODE (op0) == BLKmode)
4378 return NULL;
4380 if ((bitpos % BITS_PER_UNIT) == 0
4381 && bitsize == GET_MODE_BITSIZE (mode1))
4383 machine_mode opmode = GET_MODE (op0);
4385 if (opmode == VOIDmode)
4386 opmode = TYPE_MODE (TREE_TYPE (tem));
4388 /* This condition may hold if we're expanding the address
4389 right past the end of an array that turned out not to
4390 be addressable (i.e., the address was only computed in
4391 debug stmts). The gen_subreg below would rightfully
4392 crash, and the address doesn't really exist, so just
4393 drop it. */
4394 if (bitpos >= GET_MODE_BITSIZE (opmode))
4395 return NULL;
4397 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4398 return simplify_gen_subreg (mode, op0, opmode,
4399 bitpos / BITS_PER_UNIT);
4402 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4403 && TYPE_UNSIGNED (TREE_TYPE (exp))
4404 ? SIGN_EXTRACT
4405 : ZERO_EXTRACT, mode,
4406 GET_MODE (op0) != VOIDmode
4407 ? GET_MODE (op0)
4408 : TYPE_MODE (TREE_TYPE (tem)),
4409 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4412 case ABS_EXPR:
4413 return simplify_gen_unary (ABS, mode, op0, mode);
4415 case NEGATE_EXPR:
4416 return simplify_gen_unary (NEG, mode, op0, mode);
4418 case BIT_NOT_EXPR:
4419 return simplify_gen_unary (NOT, mode, op0, mode);
4421 case FLOAT_EXPR:
4422 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4423 0)))
4424 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4425 inner_mode);
4427 case FIX_TRUNC_EXPR:
4428 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4429 inner_mode);
4431 case POINTER_PLUS_EXPR:
4432 /* For the rare target where pointers are not the same size as
4433 size_t, we need to check for mis-matched modes and correct
4434 the addend. */
4435 if (op0 && op1
4436 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4437 && GET_MODE (op0) != GET_MODE (op1))
4439 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4440 /* If OP0 is a partial mode, then we must truncate, even if it has
4441 the same bitsize as OP1, since GCC's representation of partial modes
4442 is opaque. */
4443 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4444 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4445 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4446 GET_MODE (op1));
4447 else
4448 /* We always sign-extend, regardless of the signedness of
4449 the operand, because the operand is always unsigned
4450 here even if the original C expression is signed. */
4451 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4452 GET_MODE (op1));
4454 /* Fall through. */
4455 case PLUS_EXPR:
4456 return simplify_gen_binary (PLUS, mode, op0, op1);
4458 case MINUS_EXPR:
4459 return simplify_gen_binary (MINUS, mode, op0, op1);
4461 case MULT_EXPR:
4462 return simplify_gen_binary (MULT, mode, op0, op1);
4464 case RDIV_EXPR:
4465 case TRUNC_DIV_EXPR:
4466 case EXACT_DIV_EXPR:
4467 if (unsignedp)
4468 return simplify_gen_binary (UDIV, mode, op0, op1);
4469 else
4470 return simplify_gen_binary (DIV, mode, op0, op1);
4472 case TRUNC_MOD_EXPR:
4473 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4475 case FLOOR_DIV_EXPR:
4476 if (unsignedp)
4477 return simplify_gen_binary (UDIV, mode, op0, op1);
4478 else
4480 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4481 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4482 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4483 return simplify_gen_binary (PLUS, mode, div, adj);
4486 case FLOOR_MOD_EXPR:
4487 if (unsignedp)
4488 return simplify_gen_binary (UMOD, mode, op0, op1);
4489 else
4491 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4492 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4493 adj = simplify_gen_unary (NEG, mode,
4494 simplify_gen_binary (MULT, mode, adj, op1),
4495 mode);
4496 return simplify_gen_binary (PLUS, mode, mod, adj);
4499 case CEIL_DIV_EXPR:
4500 if (unsignedp)
4502 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4503 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4504 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4505 return simplify_gen_binary (PLUS, mode, div, adj);
4507 else
4509 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4510 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4511 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4512 return simplify_gen_binary (PLUS, mode, div, adj);
4515 case CEIL_MOD_EXPR:
4516 if (unsignedp)
4518 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4519 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4520 adj = simplify_gen_unary (NEG, mode,
4521 simplify_gen_binary (MULT, mode, adj, op1),
4522 mode);
4523 return simplify_gen_binary (PLUS, mode, mod, adj);
4525 else
4527 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4528 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4529 adj = simplify_gen_unary (NEG, mode,
4530 simplify_gen_binary (MULT, mode, adj, op1),
4531 mode);
4532 return simplify_gen_binary (PLUS, mode, mod, adj);
4535 case ROUND_DIV_EXPR:
4536 if (unsignedp)
4538 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4539 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4540 rtx adj = round_udiv_adjust (mode, mod, op1);
4541 return simplify_gen_binary (PLUS, mode, div, adj);
4543 else
4545 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4546 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4547 rtx adj = round_sdiv_adjust (mode, mod, op1);
4548 return simplify_gen_binary (PLUS, mode, div, adj);
4551 case ROUND_MOD_EXPR:
4552 if (unsignedp)
4554 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4555 rtx adj = round_udiv_adjust (mode, mod, op1);
4556 adj = simplify_gen_unary (NEG, mode,
4557 simplify_gen_binary (MULT, mode, adj, op1),
4558 mode);
4559 return simplify_gen_binary (PLUS, mode, mod, adj);
4561 else
4563 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4564 rtx adj = round_sdiv_adjust (mode, mod, op1);
4565 adj = simplify_gen_unary (NEG, mode,
4566 simplify_gen_binary (MULT, mode, adj, op1),
4567 mode);
4568 return simplify_gen_binary (PLUS, mode, mod, adj);
4571 case LSHIFT_EXPR:
4572 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4574 case RSHIFT_EXPR:
4575 if (unsignedp)
4576 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4577 else
4578 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4580 case LROTATE_EXPR:
4581 return simplify_gen_binary (ROTATE, mode, op0, op1);
4583 case RROTATE_EXPR:
4584 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4586 case MIN_EXPR:
4587 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4589 case MAX_EXPR:
4590 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4592 case BIT_AND_EXPR:
4593 case TRUTH_AND_EXPR:
4594 return simplify_gen_binary (AND, mode, op0, op1);
4596 case BIT_IOR_EXPR:
4597 case TRUTH_OR_EXPR:
4598 return simplify_gen_binary (IOR, mode, op0, op1);
4600 case BIT_XOR_EXPR:
4601 case TRUTH_XOR_EXPR:
4602 return simplify_gen_binary (XOR, mode, op0, op1);
4604 case TRUTH_ANDIF_EXPR:
4605 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4607 case TRUTH_ORIF_EXPR:
4608 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4610 case TRUTH_NOT_EXPR:
4611 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4613 case LT_EXPR:
4614 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4615 op0, op1);
4617 case LE_EXPR:
4618 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4619 op0, op1);
4621 case GT_EXPR:
4622 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4623 op0, op1);
4625 case GE_EXPR:
4626 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4627 op0, op1);
4629 case EQ_EXPR:
4630 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4632 case NE_EXPR:
4633 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4635 case UNORDERED_EXPR:
4636 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4638 case ORDERED_EXPR:
4639 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4641 case UNLT_EXPR:
4642 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4644 case UNLE_EXPR:
4645 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4647 case UNGT_EXPR:
4648 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4650 case UNGE_EXPR:
4651 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4653 case UNEQ_EXPR:
4654 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4656 case LTGT_EXPR:
4657 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4659 case COND_EXPR:
4660 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4662 case COMPLEX_EXPR:
4663 gcc_assert (COMPLEX_MODE_P (mode));
4664 if (GET_MODE (op0) == VOIDmode)
4665 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4666 if (GET_MODE (op1) == VOIDmode)
4667 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4668 return gen_rtx_CONCAT (mode, op0, op1);
4670 case CONJ_EXPR:
4671 if (GET_CODE (op0) == CONCAT)
4672 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4673 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4674 XEXP (op0, 1),
4675 GET_MODE_INNER (mode)));
4676 else
4678 machine_mode imode = GET_MODE_INNER (mode);
4679 rtx re, im;
4681 if (MEM_P (op0))
4683 re = adjust_address_nv (op0, imode, 0);
4684 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4686 else
4688 machine_mode ifmode = int_mode_for_mode (mode);
4689 machine_mode ihmode = int_mode_for_mode (imode);
4690 rtx halfsize;
4691 if (ifmode == BLKmode || ihmode == BLKmode)
4692 return NULL;
4693 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4694 re = op0;
4695 if (mode != ifmode)
4696 re = gen_rtx_SUBREG (ifmode, re, 0);
4697 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4698 if (imode != ihmode)
4699 re = gen_rtx_SUBREG (imode, re, 0);
4700 im = copy_rtx (op0);
4701 if (mode != ifmode)
4702 im = gen_rtx_SUBREG (ifmode, im, 0);
4703 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4704 if (imode != ihmode)
4705 im = gen_rtx_SUBREG (imode, im, 0);
4707 im = gen_rtx_NEG (imode, im);
4708 return gen_rtx_CONCAT (mode, re, im);
4711 case ADDR_EXPR:
4712 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4713 if (!op0 || !MEM_P (op0))
4715 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4716 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4717 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4718 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4719 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4720 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4722 if (handled_component_p (TREE_OPERAND (exp, 0)))
4724 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4725 tree decl
4726 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4727 &bitoffset, &bitsize, &maxsize);
4728 if ((TREE_CODE (decl) == VAR_DECL
4729 || TREE_CODE (decl) == PARM_DECL
4730 || TREE_CODE (decl) == RESULT_DECL)
4731 && (!TREE_ADDRESSABLE (decl)
4732 || target_for_debug_bind (decl))
4733 && (bitoffset % BITS_PER_UNIT) == 0
4734 && bitsize > 0
4735 && bitsize == maxsize)
4737 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4738 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4742 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4743 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4744 == ADDR_EXPR)
4746 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4747 0));
4748 if (op0 != NULL
4749 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4750 || (GET_CODE (op0) == PLUS
4751 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4752 && CONST_INT_P (XEXP (op0, 1)))))
4754 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4755 1));
4756 if (!op1 || !CONST_INT_P (op1))
4757 return NULL;
4759 return plus_constant (mode, op0, INTVAL (op1));
4763 return NULL;
4766 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4767 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4769 return op0;
4771 case VECTOR_CST:
4773 unsigned i;
4775 op0 = gen_rtx_CONCATN
4776 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4778 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4780 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4781 if (!op1)
4782 return NULL;
4783 XVECEXP (op0, 0, i) = op1;
4786 return op0;
4789 case CONSTRUCTOR:
4790 if (TREE_CLOBBER_P (exp))
4791 return NULL;
4792 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4794 unsigned i;
4795 tree val;
4797 op0 = gen_rtx_CONCATN
4798 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4800 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4802 op1 = expand_debug_expr (val);
4803 if (!op1)
4804 return NULL;
4805 XVECEXP (op0, 0, i) = op1;
4808 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4810 op1 = expand_debug_expr
4811 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4813 if (!op1)
4814 return NULL;
4816 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4817 XVECEXP (op0, 0, i) = op1;
4820 return op0;
4822 else
4823 goto flag_unsupported;
4825 case CALL_EXPR:
4826 /* ??? Maybe handle some builtins? */
4827 return NULL;
4829 case SSA_NAME:
4831 gimple g = get_gimple_for_ssa_name (exp);
4832 if (g)
4834 tree t = NULL_TREE;
4835 if (deep_ter_debug_map)
4837 tree *slot = deep_ter_debug_map->get (exp);
4838 if (slot)
4839 t = *slot;
4841 if (t == NULL_TREE)
4842 t = gimple_assign_rhs_to_tree (g);
4843 op0 = expand_debug_expr (t);
4844 if (!op0)
4845 return NULL;
4847 else
4849 int part = var_to_partition (SA.map, exp);
4851 if (part == NO_PARTITION)
4853 /* If this is a reference to the incoming value of a parameter
4854 that is never used in the code, or whose incoming value is
4855 never used in the code, use the PARM_DECL's DECL_RTL if
4856 set. */
4857 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4858 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4860 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4861 if (op0)
4862 goto adjust_mode;
4863 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4864 if (op0)
4865 goto adjust_mode;
4867 return NULL;
4870 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4872 op0 = copy_rtx (SA.partition_to_pseudo[part]);
4874 goto adjust_mode;
4877 case ERROR_MARK:
4878 return NULL;
4880 /* Vector stuff. For most of the codes we don't have rtl codes. */
4881 case REALIGN_LOAD_EXPR:
4882 case REDUC_MAX_EXPR:
4883 case REDUC_MIN_EXPR:
4884 case REDUC_PLUS_EXPR:
4885 case VEC_COND_EXPR:
4886 case VEC_PACK_FIX_TRUNC_EXPR:
4887 case VEC_PACK_SAT_EXPR:
4888 case VEC_PACK_TRUNC_EXPR:
4889 case VEC_UNPACK_FLOAT_HI_EXPR:
4890 case VEC_UNPACK_FLOAT_LO_EXPR:
4891 case VEC_UNPACK_HI_EXPR:
4892 case VEC_UNPACK_LO_EXPR:
4893 case VEC_WIDEN_MULT_HI_EXPR:
4894 case VEC_WIDEN_MULT_LO_EXPR:
4895 case VEC_WIDEN_MULT_EVEN_EXPR:
4896 case VEC_WIDEN_MULT_ODD_EXPR:
4897 case VEC_WIDEN_LSHIFT_HI_EXPR:
4898 case VEC_WIDEN_LSHIFT_LO_EXPR:
4899 case VEC_PERM_EXPR:
4900 return NULL;
4902 /* Misc codes. */
4903 case ADDR_SPACE_CONVERT_EXPR:
4904 case FIXED_CONVERT_EXPR:
4905 case OBJ_TYPE_REF:
4906 case WITH_SIZE_EXPR:
4907 return NULL;
4909 case DOT_PROD_EXPR:
4910 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4911 && SCALAR_INT_MODE_P (mode))
4914 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4915 0)))
4916 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4917 inner_mode);
4919 op1 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4920 1)))
4921 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4922 inner_mode);
4923 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4924 return simplify_gen_binary (PLUS, mode, op0, op2);
4926 return NULL;
4928 case WIDEN_MULT_EXPR:
4929 case WIDEN_MULT_PLUS_EXPR:
4930 case WIDEN_MULT_MINUS_EXPR:
4931 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4932 && SCALAR_INT_MODE_P (mode))
4934 inner_mode = GET_MODE (op0);
4935 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4936 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4937 else
4938 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4939 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4940 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4941 else
4942 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4943 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4944 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4945 return op0;
4946 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4947 return simplify_gen_binary (PLUS, mode, op0, op2);
4948 else
4949 return simplify_gen_binary (MINUS, mode, op2, op0);
4951 return NULL;
4953 case MULT_HIGHPART_EXPR:
4954 /* ??? Similar to the above. */
4955 return NULL;
4957 case WIDEN_SUM_EXPR:
4958 case WIDEN_LSHIFT_EXPR:
4959 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4960 && SCALAR_INT_MODE_P (mode))
4963 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4964 0)))
4965 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4966 inner_mode);
4967 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4968 ? ASHIFT : PLUS, mode, op0, op1);
4970 return NULL;
4972 case FMA_EXPR:
4973 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4975 default:
4976 flag_unsupported:
4977 #ifdef ENABLE_CHECKING
4978 debug_tree (exp);
4979 gcc_unreachable ();
4980 #else
4981 return NULL;
4982 #endif
4986 /* Return an RTX equivalent to the source bind value of the tree expression
4987 EXP. */
4989 static rtx
4990 expand_debug_source_expr (tree exp)
4992 rtx op0 = NULL_RTX;
4993 machine_mode mode = VOIDmode, inner_mode;
4995 switch (TREE_CODE (exp))
4997 case PARM_DECL:
4999 mode = DECL_MODE (exp);
5000 op0 = expand_debug_parm_decl (exp);
5001 if (op0)
5002 break;
5003 /* See if this isn't an argument that has been completely
5004 optimized out. */
5005 if (!DECL_RTL_SET_P (exp)
5006 && !DECL_INCOMING_RTL (exp)
5007 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5009 tree aexp = DECL_ORIGIN (exp);
5010 if (DECL_CONTEXT (aexp)
5011 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5013 vec<tree, va_gc> **debug_args;
5014 unsigned int ix;
5015 tree ddecl;
5016 debug_args = decl_debug_args_lookup (current_function_decl);
5017 if (debug_args != NULL)
5019 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5020 ix += 2)
5021 if (ddecl == aexp)
5022 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5026 break;
5028 default:
5029 break;
5032 if (op0 == NULL_RTX)
5033 return NULL_RTX;
5035 inner_mode = GET_MODE (op0);
5036 if (mode == inner_mode)
5037 return op0;
5039 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5041 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
5042 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5043 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
5044 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5045 else
5046 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5048 else if (FLOAT_MODE_P (mode))
5049 gcc_unreachable ();
5050 else if (FLOAT_MODE_P (inner_mode))
5052 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5053 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5054 else
5055 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5057 else if (CONSTANT_P (op0)
5058 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
5059 op0 = simplify_gen_subreg (mode, op0, inner_mode,
5060 subreg_lowpart_offset (mode, inner_mode));
5061 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5062 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5063 else
5064 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5066 return op0;
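/* Compact sketch, assuming scalar integer modes, of the adjustment
   performed just above (and in the adjust_mode path of
   expand_debug_expr): narrowing takes the lowpart subreg, widening
   extends according to the signedness of the source value.  */

static const char *
int_mode_adjust_sketch (unsigned int src_bits, unsigned int dst_bits,
                        int src_unsigned)
{
  if (dst_bits <= src_bits)
    return "lowpart subreg";
  return src_unsigned ? "zero_extend" : "sign_extend";
}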
5069 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
5070 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5071 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
5073 static void
5074 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5076 rtx exp = *exp_p;
5078 if (exp == NULL_RTX)
5079 return;
5081 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5082 return;
5084 if (depth == 4)
5086 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5087 rtx dval = make_debug_expr_from_rtl (exp);
5089 /* Emit a debug bind insn before INSN. */
5090 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5091 DEBUG_EXPR_TREE_DECL (dval), exp,
5092 VAR_INIT_STATUS_INITIALIZED);
5094 emit_debug_insn_before (bind, insn);
5095 *exp_p = dval;
5096 return;
5099 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5100 int i, j;
5101 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5102 switch (*format_ptr++)
5104 case 'e':
5105 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5106 break;
5108 case 'E':
5109 case 'V':
5110 for (j = 0; j < XVECLEN (exp, i); j++)
5111 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5112 break;
5114 default:
5115 break;
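/* Toy mirror of the walk above, using a binary tree instead of the rtx
   operand formats: at four levels of nesting the subexpression is split
   out (bound to a fresh DEBUG_EXPR in the real code) and the parent
   keeps only a reference to it.  Types and fields are invented for
   illustration.  */

struct toy_rtx
{
  struct toy_rtx *left, *right;
  int split_out;	/* Replaced by a reference to a debug temporary.  */
};

static void
avoid_complex_sketch (struct toy_rtx *x, int depth)
{
  if (x == NULL)
    return;
  if (depth == 4)
    {
      x->split_out = 1;
      return;
    }
  avoid_complex_sketch (x->left, depth + 1);
  avoid_complex_sketch (x->right, depth + 1);
}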
5119 /* Expand the _LOCs in debug insns. We run this after expanding all
5120 regular insns, so that any variables referenced in the function
5121 will have their DECL_RTLs set. */
5123 static void
5124 expand_debug_locations (void)
5126 rtx_insn *insn;
5127 rtx_insn *last = get_last_insn ();
5128 int save_strict_alias = flag_strict_aliasing;
5130 /* New alias sets while setting up memory attributes cause
5131 -fcompare-debug failures, even though they don't bring about any
5132 codegen changes. */
5133 flag_strict_aliasing = 0;
5135 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5136 if (DEBUG_INSN_P (insn))
5138 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5139 rtx val;
5140 rtx_insn *prev_insn, *insn2;
5141 machine_mode mode;
5143 if (value == NULL_TREE)
5144 val = NULL_RTX;
5145 else
5147 if (INSN_VAR_LOCATION_STATUS (insn)
5148 == VAR_INIT_STATUS_UNINITIALIZED)
5149 val = expand_debug_source_expr (value);
5150 /* The avoid_deep_ter_for_debug function inserts
5151 debug bind stmts after SSA_NAME definition, with the
5152 SSA_NAME as the whole bind location. Temporarily disable
5153 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5154 being defined in this DEBUG_INSN. */
5155 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5157 tree *slot = deep_ter_debug_map->get (value);
5158 if (slot)
5160 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5161 *slot = NULL_TREE;
5162 else
5163 slot = NULL;
5165 val = expand_debug_expr (value);
5166 if (slot)
5167 *slot = INSN_VAR_LOCATION_DECL (insn);
5169 else
5170 val = expand_debug_expr (value);
5171 gcc_assert (last == get_last_insn ());
5174 if (!val)
5175 val = gen_rtx_UNKNOWN_VAR_LOC ();
5176 else
5178 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5180 gcc_assert (mode == GET_MODE (val)
5181 || (GET_MODE (val) == VOIDmode
5182 && (CONST_SCALAR_INT_P (val)
5183 || GET_CODE (val) == CONST_FIXED
5184 || GET_CODE (val) == LABEL_REF)));
5187 INSN_VAR_LOCATION_LOC (insn) = val;
5188 prev_insn = PREV_INSN (insn);
5189 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5190 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5193 flag_strict_aliasing = save_strict_alias;
5196 /* Swap the operands of commutative operations so that the more
5197 expensive operand is expanded first. */
5199 static void
5200 reorder_operands (basic_block bb)
5202 unsigned int *lattice; /* Hold cost of each statement. */
5203 unsigned int i = 0, n = 0;
5204 gimple_stmt_iterator gsi;
5205 gimple_seq stmts;
5206 gimple stmt;
5207 bool swap;
5208 tree op0, op1;
5209 ssa_op_iter iter;
5210 use_operand_p use_p;
5211 gimple def0, def1;
5213 /* Compute cost of each statement using estimate_num_insns. */
5214 stmts = bb_seq (bb);
5215 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5217 stmt = gsi_stmt (gsi);
5218 if (!is_gimple_debug (stmt))
5219 gimple_set_uid (stmt, n++);
5221 lattice = XNEWVEC (unsigned int, n);
5222 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5224 unsigned cost;
5225 stmt = gsi_stmt (gsi);
5226 if (is_gimple_debug (stmt))
5227 continue;
5228 cost = estimate_num_insns (stmt, &eni_size_weights);
5229 lattice[i] = cost;
5230 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5232 tree use = USE_FROM_PTR (use_p);
5233 gimple def_stmt;
5234 if (TREE_CODE (use) != SSA_NAME)
5235 continue;
5236 def_stmt = get_gimple_for_ssa_name (use);
5237 if (!def_stmt)
5238 continue;
5239 lattice[i] += lattice[gimple_uid (def_stmt)];
5241 i++;
5242 if (!is_gimple_assign (stmt)
5243 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5244 continue;
5245 op0 = gimple_op (stmt, 1);
5246 op1 = gimple_op (stmt, 2);
5247 if (TREE_CODE (op0) != SSA_NAME
5248 || TREE_CODE (op1) != SSA_NAME)
5249 continue;
5250 /* Swap operands if the second one is more expensive. */
5251 def0 = get_gimple_for_ssa_name (op0);
5252 def1 = get_gimple_for_ssa_name (op1);
5253 if (!def1)
5254 continue;
5255 swap = false;
5256 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5257 swap = true;
5258 if (swap)
5260 if (dump_file && (dump_flags & TDF_DETAILS))
5262 fprintf (dump_file, "Swap operands in stmt:\n");
5263 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5264 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5265 def0 ? lattice[gimple_uid (def0)] : 0,
5266 lattice[gimple_uid (def1)]);
5268 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5269 gimple_assign_rhs2_ptr (stmt));
5272 XDELETE (lattice);
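/* Minimal sketch of the swap decision above: COST0 and COST1 are the
   accumulated "lattice" costs of the statements defining the two
   commutative operands; the more expensive operand is moved into the
   first position so it is expanded first.  Names are illustrative.  */

static void
maybe_swap_operands_sketch (int *op0, int *op1,
                            unsigned int cost0, unsigned int cost1)
{
  if (cost1 > cost0)
    {
      int tmp = *op0;
      *op0 = *op1;
      *op1 = tmp;
    }
}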
5275 /* Expand basic block BB from GIMPLE trees to RTL. */
5277 static basic_block
5278 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5280 gimple_stmt_iterator gsi;
5281 gimple_seq stmts;
5282 gimple stmt = NULL;
5283 rtx_note *note;
5284 rtx_insn *last;
5285 edge e;
5286 edge_iterator ei;
5288 if (dump_file)
5289 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5290 bb->index);
5292 /* Note that since we are now transitioning from GIMPLE to RTL, we
5293 cannot use the gsi_*_bb() routines because they expect the basic
5294 block to be in GIMPLE, instead of RTL. Therefore, we need to
5295 access the BB sequence directly. */
5296 if (optimize)
5297 reorder_operands (bb);
5298 stmts = bb_seq (bb);
5299 bb->il.gimple.seq = NULL;
5300 bb->il.gimple.phi_nodes = NULL;
5301 rtl_profile_for_bb (bb);
5302 init_rtl_bb_info (bb);
5303 bb->flags |= BB_RTL;
5305 /* Remove the RETURN_EXPR if we may fall through to the exit
5306 instead. */
5307 gsi = gsi_last (stmts);
5308 if (!gsi_end_p (gsi)
5309 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5311 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5313 gcc_assert (single_succ_p (bb));
5314 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5316 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5317 && !gimple_return_retval (ret_stmt))
5319 gsi_remove (&gsi, false);
5320 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5324 gsi = gsi_start (stmts);
5325 if (!gsi_end_p (gsi))
5327 stmt = gsi_stmt (gsi);
5328 if (gimple_code (stmt) != GIMPLE_LABEL)
5329 stmt = NULL;
5332 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5334 if (stmt || elt)
5336 last = get_last_insn ();
5338 if (stmt)
5340 expand_gimple_stmt (stmt);
5341 gsi_next (&gsi);
5344 if (elt)
5345 emit_label (*elt);
5347 /* Java emits line number notes at the top of labels.
5348 ??? Make this go away once line number notes are obsoleted. */
5349 BB_HEAD (bb) = NEXT_INSN (last);
5350 if (NOTE_P (BB_HEAD (bb)))
5351 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5352 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5354 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5356 else
5357 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5359 NOTE_BASIC_BLOCK (note) = bb;
5361 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5363 basic_block new_bb;
5365 stmt = gsi_stmt (gsi);
5367 /* If this statement is a non-debug one, and we generate debug
5368 insns, then this one might be the last real use of a TERed
5369 SSA_NAME, but where there are still some debug uses further
5370 down. Expanding the current SSA name in such further debug
5371 uses by their RHS might lead to wrong debug info, as coalescing
5372 might make the operands of such RHS be placed into the same
5373 pseudo as something else. Like so:
5374 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5375 use(a_1);
5376 a_2 = ...
5377 #DEBUG ... => a_1
5378 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5379 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5380 the write to a_2 would actually have clobbered the place which
5381 formerly held a_0.
5383 So, instead of that, we recognize the situation, and generate
5384 debug temporaries at the last real use of TERed SSA names:
5385 a_1 = a_0 + 1;
5386 #DEBUG #D1 => a_1
5387 use(a_1);
5388 a_2 = ...
5389 #DEBUG ... => #D1
5391 if (MAY_HAVE_DEBUG_INSNS
5392 && SA.values
5393 && !is_gimple_debug (stmt))
5395 ssa_op_iter iter;
5396 tree op;
5397 gimple def;
5399 location_t sloc = curr_insn_location ();
5401 /* Look for SSA names that have their last use here (TERed
5402 names always have only one real use). */
5403 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5404 if ((def = get_gimple_for_ssa_name (op)))
5406 imm_use_iterator imm_iter;
5407 use_operand_p use_p;
5408 bool have_debug_uses = false;
5410 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5412 if (gimple_debug_bind_p (USE_STMT (use_p)))
5414 have_debug_uses = true;
5415 break;
5419 if (have_debug_uses)
5421 /* OP is a TERed SSA name, with DEF its defining
5422 statement, and where OP is used in further debug
5423 instructions. Generate a debug temporary, and
5424 replace all uses of OP in debug insns with that
5425 temporary. */
5426 gimple debugstmt;
5427 tree value = gimple_assign_rhs_to_tree (def);
5428 tree vexpr = make_node (DEBUG_EXPR_DECL);
5429 rtx val;
5430 machine_mode mode;
5432 set_curr_insn_location (gimple_location (def));
5434 DECL_ARTIFICIAL (vexpr) = 1;
5435 TREE_TYPE (vexpr) = TREE_TYPE (value);
5436 if (DECL_P (value))
5437 mode = DECL_MODE (value);
5438 else
5439 mode = TYPE_MODE (TREE_TYPE (value));
5440 DECL_MODE (vexpr) = mode;
5442 val = gen_rtx_VAR_LOCATION
5443 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5445 emit_debug_insn (val);
5447 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5449 if (!gimple_debug_bind_p (debugstmt))
5450 continue;
5452 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5453 SET_USE (use_p, vexpr);
5455 update_stmt (debugstmt);
5459 set_curr_insn_location (sloc);
5462 currently_expanding_gimple_stmt = stmt;
5464 /* Expand this statement, then evaluate the resulting RTL and
5465 fixup the CFG accordingly. */
5466 if (gimple_code (stmt) == GIMPLE_COND)
5468 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5469 if (new_bb)
5470 return new_bb;
5472 else if (gimple_debug_bind_p (stmt))
5474 location_t sloc = curr_insn_location ();
5475 gimple_stmt_iterator nsi = gsi;
5477 for (;;)
5479 tree var = gimple_debug_bind_get_var (stmt);
5480 tree value;
5481 rtx val;
5482 machine_mode mode;
5484 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5485 && TREE_CODE (var) != LABEL_DECL
5486 && !target_for_debug_bind (var))
5487 goto delink_debug_stmt;
5489 if (gimple_debug_bind_has_value_p (stmt))
5490 value = gimple_debug_bind_get_value (stmt);
5491 else
5492 value = NULL_TREE;
5494 last = get_last_insn ();
5496 set_curr_insn_location (gimple_location (stmt));
5498 if (DECL_P (var))
5499 mode = DECL_MODE (var);
5500 else
5501 mode = TYPE_MODE (TREE_TYPE (var));
5503 val = gen_rtx_VAR_LOCATION
5504 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5506 emit_debug_insn (val);
5508 if (dump_file && (dump_flags & TDF_DETAILS))
5510 /* We can't dump the insn with a TREE where an RTX
5511 is expected. */
5512 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5513 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5514 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5517 delink_debug_stmt:
5518 /* In order not to generate too many debug temporaries,
5519 we delink all uses of debug statements we already expanded.
5520 Therefore debug statements between definition and real
5521 use of TERed SSA names will continue to use the SSA name,
5522 and not be replaced with debug temps. */
5523 delink_stmt_imm_use (stmt);
5525 gsi = nsi;
5526 gsi_next (&nsi);
5527 if (gsi_end_p (nsi))
5528 break;
5529 stmt = gsi_stmt (nsi);
5530 if (!gimple_debug_bind_p (stmt))
5531 break;
5534 set_curr_insn_location (sloc);
5536 else if (gimple_debug_source_bind_p (stmt))
5538 location_t sloc = curr_insn_location ();
5539 tree var = gimple_debug_source_bind_get_var (stmt);
5540 tree value = gimple_debug_source_bind_get_value (stmt);
5541 rtx val;
5542 machine_mode mode;
5544 last = get_last_insn ();
5546 set_curr_insn_location (gimple_location (stmt));
5548 mode = DECL_MODE (var);
5550 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5551 VAR_INIT_STATUS_UNINITIALIZED);
5553 emit_debug_insn (val);
5555 if (dump_file && (dump_flags & TDF_DETAILS))
5557 /* We can't dump the insn with a TREE where an RTX
5558 is expected. */
5559 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5560 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5561 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5564 set_curr_insn_location (sloc);
5566 else
5568 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5569 if (call_stmt
5570 && gimple_call_tail_p (call_stmt)
5571 && disable_tail_calls)
5572 gimple_call_set_tail (call_stmt, false);
5574 if (call_stmt && gimple_call_tail_p (call_stmt))
5576 bool can_fallthru;
5577 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5578 if (new_bb)
5580 if (can_fallthru)
5581 bb = new_bb;
5582 else
5583 return new_bb;
5586 else
5588 def_operand_p def_p;
5589 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5591 if (def_p != NULL)
5593 /* Ignore this stmt if it is in the list of
5594 replaceable expressions. */
5595 if (SA.values
5596 && bitmap_bit_p (SA.values,
5597 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5598 continue;
5600 last = expand_gimple_stmt (stmt);
5601 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5606 currently_expanding_gimple_stmt = NULL;
5608 /* Expand implicit goto and convert goto_locus. */
5609 FOR_EACH_EDGE (e, ei, bb->succs)
5611 if (e->goto_locus != UNKNOWN_LOCATION)
5612 set_curr_insn_location (e->goto_locus);
5613 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5615 emit_jump (label_rtx_for_bb (e->dest));
5616 e->flags &= ~EDGE_FALLTHRU;
5620 /* Expanded RTL can create a jump as the last instruction of the block.
5621 Later passes might assume it is a jump to the successor and break edge
5622 insertion. We need to insert a dummy move to prevent this. PR41440. */
5623 if (single_succ_p (bb)
5624 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5625 && (last = get_last_insn ())
5626 && JUMP_P (last))
5628 rtx dummy = gen_reg_rtx (SImode);
5629 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5632 do_pending_stack_adjust ();
5634 /* Find the block tail. The last insn in the block is the insn
5635 before a barrier and/or table jump insn. */
5636 last = get_last_insn ();
5637 if (BARRIER_P (last))
5638 last = PREV_INSN (last);
5639 if (JUMP_TABLE_DATA_P (last))
5640 last = PREV_INSN (PREV_INSN (last));
5641 BB_END (bb) = last;
5643 update_bb_for_insn (bb);
5645 return bb;
5649 /* Create a basic block for initialization code. */
5651 static basic_block
5652 construct_init_block (void)
5654 basic_block init_block, first_block;
5655 edge e = NULL;
5656 int flags;
5658 /* Multiple entry points not supported yet. */
5659 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5660 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5661 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5662 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5663 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5665 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5667 /* When the entry edge points to the first basic block, we don't need a
5668 jump; otherwise we have to jump to the proper target. */
5669 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5671 tree label = gimple_block_label (e->dest);
5673 emit_jump (jump_target_rtx (label));
5674 flags = 0;
5676 else
5677 flags = EDGE_FALLTHRU;
5679 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5680 get_last_insn (),
5681 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5682 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5683 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5684 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5685 if (e)
5687 first_block = e->dest;
5688 redirect_edge_succ (e, init_block);
5689 e = make_edge (init_block, first_block, flags);
5691 else
5692 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5693 e->probability = REG_BR_PROB_BASE;
5694 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5696 update_bb_for_insn (init_block);
5697 return init_block;
5700 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5701 found in the block tree. */
5703 static void
5704 set_block_levels (tree block, int level)
5706 while (block)
5708 BLOCK_NUMBER (block) = level;
5709 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5710 block = BLOCK_CHAIN (block);
5714 /* Create a block containing landing pads and similar stuff. */
5716 static void
5717 construct_exit_block (void)
5719 rtx_insn *head = get_last_insn ();
5720 rtx_insn *end;
5721 basic_block exit_block;
5722 edge e, e2;
5723 unsigned ix;
5724 edge_iterator ei;
5725 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5726 rtx_insn *orig_end = BB_END (prev_bb);
5728 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5730 /* Make sure the locus is set to the end of the function, so that
5731 epilogue line numbers and warnings are set properly. */
5732 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5733 input_location = cfun->function_end_locus;
5735 /* Generate rtl for function exit. */
5736 expand_function_end ();
5738 end = get_last_insn ();
5739 if (head == end)
5740 return;
5741 /* While emitting the function end we may have moved the end of the
5742 last basic block. */
5743 BB_END (prev_bb) = orig_end;
5744 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5745 head = NEXT_INSN (head);
5746 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5747 bb frequency counting will be confused. Any instructions before that
5748 label are emitted for the case where PREV_BB falls through into the
5749 exit block, so append those instructions to prev_bb in that case. */
5750 if (NEXT_INSN (head) != return_label)
5752 while (NEXT_INSN (head) != return_label)
5754 if (!NOTE_P (NEXT_INSN (head)))
5755 BB_END (prev_bb) = NEXT_INSN (head);
5756 head = NEXT_INSN (head);
5759 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5760 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5761 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5762 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5764 ix = 0;
5765 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5767 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5768 if (!(e->flags & EDGE_ABNORMAL))
5769 redirect_edge_succ (e, exit_block);
5770 else
5771 ix++;
5774 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5775 e->probability = REG_BR_PROB_BASE;
5776 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5777 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5778 if (e2 != e)
5780 e->count -= e2->count;
5781 exit_block->count -= e2->count;
5782 exit_block->frequency -= EDGE_FREQUENCY (e2);
5784 if (e->count < 0)
5785 e->count = 0;
5786 if (exit_block->count < 0)
5787 exit_block->count = 0;
5788 if (exit_block->frequency < 0)
5789 exit_block->frequency = 0;
5790 update_bb_for_insn (exit_block);
5793 /* Helper function for discover_nonconstant_array_refs.
5794 Look for ARRAY_REF nodes with non-constant indexes and mark them
5795 addressable. */
5797 static tree
5798 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5799 void *data ATTRIBUTE_UNUSED)
5801 tree t = *tp;
5803 if (IS_TYPE_OR_DECL_P (t))
5804 *walk_subtrees = 0;
5805 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5807 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5808 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5809 && (!TREE_OPERAND (t, 2)
5810 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5811 || (TREE_CODE (t) == COMPONENT_REF
5812 && (!TREE_OPERAND (t,2)
5813 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5814 || TREE_CODE (t) == BIT_FIELD_REF
5815 || TREE_CODE (t) == REALPART_EXPR
5816 || TREE_CODE (t) == IMAGPART_EXPR
5817 || TREE_CODE (t) == VIEW_CONVERT_EXPR
5818 || CONVERT_EXPR_P (t))
5819 t = TREE_OPERAND (t, 0);
5821 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5823 t = get_base_address (t);
5824 if (t && DECL_P (t)
5825 && DECL_MODE (t) != BLKmode)
5826 TREE_ADDRESSABLE (t) = 1;
5829 *walk_subtrees = 0;
5832 return NULL_TREE;
5835 /* RTL expansion is not able to compile array references with variable
5836 offsets for arrays stored in a single register. Discover such
5837 expressions and mark the variables as addressable to avoid this
5838 scenario. */
5840 static void
5841 discover_nonconstant_array_refs (void)
5843 basic_block bb;
5844 gimple_stmt_iterator gsi;
5846 FOR_EACH_BB_FN (bb, cfun)
5847 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5849 gimple stmt = gsi_stmt (gsi);
5850 if (!is_gimple_debug (stmt))
5851 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
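/* Hedged illustration of the kind of source this catches: V is small
   enough that it could otherwise live in a single register, but the
   non-constant array index forces it to be marked addressable and
   therefore stored in memory.  */

static int
variable_index_sketch (int i)
{
  struct { int a[2]; } v = { { 1, 2 } };
  return v.a[i];	/* Variable index => V becomes TREE_ADDRESSABLE.  */
}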
5855 /* This function sets crtl->args.internal_arg_pointer to a virtual
5856 register if DRAP is needed. The local register allocator will replace
5857 virtual_incoming_args_rtx with the virtual register. */
5859 static void
5860 expand_stack_alignment (void)
5862 rtx drap_rtx;
5863 unsigned int preferred_stack_boundary;
5865 if (! SUPPORTS_STACK_ALIGNMENT)
5866 return;
5868 if (cfun->calls_alloca
5869 || cfun->has_nonlocal_label
5870 || crtl->has_nonlocal_goto)
5871 crtl->need_drap = true;
5873 /* Call update_stack_boundary here again to update incoming stack
5874 boundary. It may set incoming stack alignment to a different
5875 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5876 use the minimum incoming stack alignment to check if it is OK
5877 to perform sibcall optimization since sibcall optimization will
5878 only align the outgoing stack to incoming stack boundary. */
5879 if (targetm.calls.update_stack_boundary)
5880 targetm.calls.update_stack_boundary ();
5882 /* The incoming stack frame has to be aligned at least at
5883 parm_stack_boundary. */
5884 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5886 /* Update crtl->stack_alignment_estimated and use it later to align
5887 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5888 exceptions since callgraph doesn't collect incoming stack alignment
5889 in this case. */
5890 if (cfun->can_throw_non_call_exceptions
5891 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5892 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5893 else
5894 preferred_stack_boundary = crtl->preferred_stack_boundary;
5895 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5896 crtl->stack_alignment_estimated = preferred_stack_boundary;
5897 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5898 crtl->stack_alignment_needed = preferred_stack_boundary;
5900 gcc_assert (crtl->stack_alignment_needed
5901 <= crtl->stack_alignment_estimated);
5903 crtl->stack_realign_needed
5904 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5905 crtl->stack_realign_tried = crtl->stack_realign_needed;
5907 crtl->stack_realign_processed = true;
5909 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5910 alignment. */
5911 gcc_assert (targetm.calls.get_drap_rtx != NULL);
5912 drap_rtx = targetm.calls.get_drap_rtx ();
5914 /* stack_realign_drap and drap_rtx must match. */
5915 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5917 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5918 if (NULL != drap_rtx)
5920 crtl->args.internal_arg_pointer = drap_rtx;
5922 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5923 needed. */
5924 fixup_tail_calls ();
5929 static void
5930 expand_main_function (void)
5932 #if (defined(INVOKE__main) \
5933 || (!defined(HAS_INIT_SECTION) \
5934 && !defined(INIT_SECTION_ASM_OP) \
5935 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5936 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5937 #endif
5941 /* Expand code to initialize the stack_protect_guard. This is invoked at
5942 the beginning of a function to be protected. */
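/* Conceptually (a rough sketch, not the literal RTL this function emits),
   the generated code behaves like:

	frame_guard_slot = __stack_chk_guard;        <- prologue, this function
	...
	if (frame_guard_slot != __stack_chk_guard)   <- epilogue check
	  __stack_chk_fail ();

   where __stack_chk_guard is the usual guard symbol returned by the
   default targetm.stack_protect_guard hook and __stack_chk_fail is the
   failure routine invoked by the matching epilogue check.  */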
5944 static void
5945 stack_protect_prologue (void)
5947 tree guard_decl = targetm.stack_protect_guard ();
5948 rtx x, y;
5950 x = expand_normal (crtl->stack_protect_guard);
5951 y = expand_normal (guard_decl);
5953 /* Allow the target to copy from Y to X without leaking Y into a
5954 register. */
5955 if (targetm.have_stack_protect_set ())
5956 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
5958 emit_insn (insn);
5959 return;
5962 /* Otherwise do a straight move. */
5963 emit_move_insn (x, y);
5966 /* Translate the intermediate representation contained in the CFG
5967 from GIMPLE trees to RTL.
5969 We do conversion per basic block and preserve/update the tree CFG.
5970 This implies we have to do some magic as the CFG can simultaneously
5971 consist of basic blocks containing RTL and GIMPLE trees. This can
5972 confuse the CFG hooks, so be careful not to manipulate the CFG during
5973 the expansion. */
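/* As a rough illustration (notation and pseudo numbers are approximate),
   a GIMPLE assignment such as

	c_3 = a_1 + b_2;

   is expanded into RTL along the lines of

	(set (reg:SI 62 [ c ])
	     (plus:SI (reg:SI 60 [ a ]) (reg:SI 61 [ b ])))

   with the pseudo registers chosen from the out-of-SSA partition map set
   up at the start of pass_expand::execute.  */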
5975 namespace {
5977 const pass_data pass_data_expand =
5979 RTL_PASS, /* type */
5980 "expand", /* name */
5981 OPTGROUP_NONE, /* optinfo_flags */
5982 TV_EXPAND, /* tv_id */
5983 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5984 | PROP_gimple_lcx
5985 | PROP_gimple_lvec
5986 | PROP_gimple_lva), /* properties_required */
5987 PROP_rtl, /* properties_provided */
5988 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5989 0, /* todo_flags_start */
5990 0, /* todo_flags_finish */
5993 class pass_expand : public rtl_opt_pass
5995 public:
5996 pass_expand (gcc::context *ctxt)
5997 : rtl_opt_pass (pass_data_expand, ctxt)
6000 /* opt_pass methods: */
6001 virtual unsigned int execute (function *);
6003 }; // class pass_expand
6005 unsigned int
6006 pass_expand::execute (function *fun)
6008 basic_block bb, init_block;
6009 sbitmap blocks;
6010 edge_iterator ei;
6011 edge e;
6012 rtx_insn *var_seq, *var_ret_seq;
6013 unsigned i;
6015 timevar_push (TV_OUT_OF_SSA);
6016 rewrite_out_of_ssa (&SA);
6017 timevar_pop (TV_OUT_OF_SSA);
6018 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6020 if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
6022 gimple_stmt_iterator gsi;
6023 FOR_EACH_BB_FN (bb, cfun)
6024 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6025 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6026 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6029 /* Make sure all values used by the optimization passes have sane
6030 defaults. */
6031 reg_renumber = 0;
6033 /* Some backends want to know that we are expanding to RTL. */
6034 currently_expanding_to_rtl = 1;
6035 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6036 free_dominance_info (CDI_DOMINATORS);
6038 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6040 if (chkp_function_instrumented_p (current_function_decl))
6041 chkp_reset_rtl_bounds ();
6043 insn_locations_init ();
6044 if (!DECL_IS_BUILTIN (current_function_decl))
6046 /* Eventually, all FEs should explicitly set function_start_locus. */
6047 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6048 set_curr_insn_location
6049 (DECL_SOURCE_LOCATION (current_function_decl));
6050 else
6051 set_curr_insn_location (fun->function_start_locus);
6053 else
6054 set_curr_insn_location (UNKNOWN_LOCATION);
6055 prologue_location = curr_insn_location ();
6057 #ifdef INSN_SCHEDULING
6058 init_sched_attrs ();
6059 #endif
6061 /* Make sure first insn is a note even if we don't want linenums.
6062 This makes sure the first insn will never be deleted.
6063 Also, final expects a note to appear there. */
6064 emit_note (NOTE_INSN_DELETED);
6066 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6067 discover_nonconstant_array_refs ();
6069 targetm.expand_to_rtl_hook ();
6070 crtl->stack_alignment_needed = STACK_BOUNDARY;
6071 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
6072 crtl->stack_alignment_estimated = 0;
6073 crtl->preferred_stack_boundary = STACK_BOUNDARY;
6074 fun->cfg->max_jumptable_ents = 0;
6076 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
6077 of the function section at expansion time to predict the distance of calls. */
6078 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6080 /* Expand the variables recorded during gimple lowering. */
6081 timevar_push (TV_VAR_EXPAND);
6082 start_sequence ();
6084 var_ret_seq = expand_used_vars ();
6086 var_seq = get_insns ();
6087 end_sequence ();
6088 timevar_pop (TV_VAR_EXPAND);
6090 /* Honor stack protection warnings. */
6091 if (warn_stack_protect)
6093 if (fun->calls_alloca)
6094 warning (OPT_Wstack_protector,
6095 "stack protector not protecting local variables: "
6096 "variable length buffer");
6097 if (has_short_buffer && !crtl->stack_protect_guard)
6098 warning (OPT_Wstack_protector,
6099 "stack protector not protecting function: "
6100 "all local arrays are less than %d bytes long",
6101 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6104 /* Set up parameters and prepare for return, for the function. */
6105 expand_function_start (current_function_decl);
6107 /* If we emitted any instructions for setting up the variables,
6108 emit them before the FUNCTION_START note. */
6109 if (var_seq)
6111 emit_insn_before (var_seq, parm_birth_insn);
6113 /* In expand_function_end we'll insert the alloca save/restore
6114 before parm_birth_insn. We've just inserted an alloca call.
6115 Adjust the pointer to match. */
6116 parm_birth_insn = var_seq;
6119 /* If we have an SSA partition containing differently aligned pointers,
6120 we need to merge those into the corresponding RTL pointer
6121 alignment. */
6122 for (i = 1; i < num_ssa_names; i++)
6124 tree name = ssa_name (i);
6125 int part;
6127 if (!name
6128 /* We might have generated new SSA names in
6129 update_alias_info_with_stack_vars. They will have a NULL
6130 defining statement, and won't be part of the partitioning,
6131 so ignore those. */
6132 || !SSA_NAME_DEF_STMT (name))
6133 continue;
6134 part = var_to_partition (SA.map, name);
6135 if (part == NO_PARTITION)
6136 continue;
6138 gcc_assert (SA.partition_to_pseudo[part]);
6140 /* If this decl was marked as living in multiple places, reset
6141 this now to NULL. */
6142 tree var = SSA_NAME_VAR (name);
6143 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6144 SET_DECL_RTL (var, NULL);
6145 /* Check that the pseudos chosen by assign_parms are those of
6146 the corresponding default defs. */
6147 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6148 && (TREE_CODE (var) == PARM_DECL
6149 || TREE_CODE (var) == RESULT_DECL))
6151 rtx in = DECL_RTL_IF_SET (var);
6152 gcc_assert (in);
6153 rtx out = SA.partition_to_pseudo[part];
6154 gcc_assert (in == out || rtx_equal_p (in, out));
6158 /* If this function is `main', emit a call to `__main'
6159 to run global initializers, etc. */
6160 if (DECL_NAME (current_function_decl)
6161 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6162 && DECL_FILE_SCOPE_P (current_function_decl))
6163 expand_main_function ();
6165 /* Initialize the stack_protect_guard field. This must happen after the
6166 call to __main (if any) so that the external decl is initialized. */
6167 if (crtl->stack_protect_guard)
6168 stack_protect_prologue ();
6170 expand_phi_nodes (&SA);
6172 /* Register rtl specific functions for cfg. */
6173 rtl_register_cfg_hooks ();
6175 init_block = construct_init_block ();
6177 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
6178 remaining edges later. */
6179 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6180 e->flags &= ~EDGE_EXECUTABLE;
6182 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6183 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6184 next_bb)
6185 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6187 if (MAY_HAVE_DEBUG_INSNS)
6188 expand_debug_locations ();
6190 if (deep_ter_debug_map)
6192 delete deep_ter_debug_map;
6193 deep_ter_debug_map = NULL;
6196 /* Free stuff we no longer need after GIMPLE optimizations. */
6197 free_dominance_info (CDI_DOMINATORS);
6198 free_dominance_info (CDI_POST_DOMINATORS);
6199 delete_tree_cfg_annotations ();
6201 timevar_push (TV_OUT_OF_SSA);
6202 finish_out_of_ssa (&SA);
6203 timevar_pop (TV_OUT_OF_SSA);
6205 timevar_push (TV_POST_EXPAND);
6206 /* We are no longer in SSA form. */
6207 fun->gimple_df->in_ssa_p = false;
6208 loops_state_clear (LOOP_CLOSED_SSA);
6210 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6211 conservatively to true until they are all profile-aware. */
6212 delete lab_rtx_for_bb;
6213 free_histograms ();
6215 construct_exit_block ();
6216 insn_locations_finalize ();
6218 if (var_ret_seq)
6220 rtx_insn *after = return_label;
6221 rtx_insn *next = NEXT_INSN (after);
6222 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6223 after = next;
6224 emit_insn_after (var_ret_seq, after);
6227 /* Zap the tree EH table. */
6228 set_eh_throw_stmt_table (fun, NULL);
6230 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6231 to split edges, which edge insertions might do. */
6232 rebuild_jump_labels (get_insns ());
6234 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6235 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6237 edge e;
6238 edge_iterator ei;
6239 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6241 if (e->insns.r)
6243 rebuild_jump_labels_chain (e->insns.r);
6244 /* Put insns after parm birth, but before
6245 NOTE_INSN_FUNCTION_BEG. */
6246 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6247 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6249 rtx_insn *insns = e->insns.r;
6250 e->insns.r = NULL;
6251 if (NOTE_P (parm_birth_insn)
6252 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6253 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6254 else
6255 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6257 else
6258 commit_one_edge_insertion (e);
6260 else
6261 ei_next (&ei);
6265 /* We're done expanding trees to RTL. */
6266 currently_expanding_to_rtl = 0;
6268 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6269 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6271 edge e;
6272 edge_iterator ei;
6273 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6275 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6276 e->flags &= ~EDGE_EXECUTABLE;
6278 /* At the moment not all abnormal edges match the RTL
6279 representation. It is safe to remove them here as
6280 find_many_sub_basic_blocks will rediscover them.
6281 In the future we should get this fixed properly. */
6282 if ((e->flags & EDGE_ABNORMAL)
6283 && !(e->flags & EDGE_SIBCALL))
6284 remove_edge (e);
6285 else
6286 ei_next (&ei);
6290 blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
6291 bitmap_ones (blocks);
6292 find_many_sub_basic_blocks (blocks);
6293 sbitmap_free (blocks);
6294 purge_all_dead_edges ();
6296 expand_stack_alignment ();
6298 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6299 function. */
6300 if (crtl->tail_call_emit)
6301 fixup_tail_calls ();
6303 /* After initial rtl generation, call back to finish generating
6304 exception support code. We need to do this before cleaning up
6305 the CFG as the code does not expect dead landing pads. */
6306 if (fun->eh->region_tree != NULL)
6307 finish_eh_generation ();
6309 /* Remove unreachable blocks; otherwise we cannot compute dominators,
6310 which are needed for loop state verification. As a side-effect
6311 this also compacts blocks.
6312 ??? We cannot remove trivially dead insns here as for example
6313 the DRAP reg on i?86 is not magically live at this point.
6314 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6315 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6317 #ifdef ENABLE_CHECKING
6318 verify_flow_info ();
6319 #endif
6321 /* Initialize pseudos allocated for hard registers. */
6322 emit_initial_value_sets ();
6324 /* And finally unshare all RTL. */
6325 unshare_all_rtl ();
6327 /* There's no need to defer outputting this function any more; we
6328 know we want to output it. */
6329 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6331 /* Now that we're done expanding trees to RTL, we shouldn't have any
6332 more CONCATs anywhere. */
6333 generating_concat_p = 0;
6335 if (dump_file)
6337 fprintf (dump_file,
6338 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6339 /* And the pass manager will dump RTL for us. */
6342 /* If we're emitting a nested function, make sure its parent gets
6343 emitted as well. Doing otherwise confuses debug info. */
6345 tree parent;
6346 for (parent = DECL_CONTEXT (current_function_decl);
6347 parent != NULL_TREE;
6348 parent = get_containing_scope (parent))
6349 if (TREE_CODE (parent) == FUNCTION_DECL)
6350 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6353 /* We are now committed to emitting code for this function. Do any
6354 preparation, such as emitting abstract debug info for the inline
6355 function before it gets mangled by optimization. */
6356 if (cgraph_function_possibly_inlined_p (current_function_decl))
6357 (*debug_hooks->outlining_inline_function) (current_function_decl);
6359 TREE_ASM_WRITTEN (current_function_decl) = 1;
6361 /* After expanding, the return labels are no longer needed. */
6362 return_label = NULL;
6363 naked_return_label = NULL;
6365 /* After expanding, the tm_restart map is no longer needed. */
6366 if (fun->gimple_df->tm_restart)
6367 fun->gimple_df->tm_restart = NULL;
6369 /* Tag the blocks with a depth number so that change_scope can find
6370 the common parent easily. */
6371 set_block_levels (DECL_INITIAL (fun->decl), 0);
6372 default_rtl_profile ();
6374 timevar_pop (TV_POST_EXPAND);
6376 return 0;
6379 } // anon namespace
6381 rtl_opt_pass *
6382 make_pass_expand (gcc::context *ctxt)
6384 return new pass_expand (ctxt);