/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "hard-reg-set.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "varasm.h"
29 #include "stor-layout.h"
30 #include "stmt.h"
31 #include "print-tree.h"
32 #include "tm_p.h"
33 #include "basic-block.h"
34 #include "function.h"
35 #include "expr.h"
36 #include "langhooks.h"
37 #include "bitmap.h"
38 #include "pointer-set.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "tree-eh.h"
42 #include "gimple-expr.h"
43 #include "is-a.h"
44 #include "gimple.h"
45 #include "gimple-iterator.h"
46 #include "gimple-walk.h"
47 #include "gimple-ssa.h"
48 #include "cgraph.h"
49 #include "tree-cfg.h"
50 #include "tree-phinodes.h"
51 #include "ssa-iterators.h"
52 #include "tree-ssanames.h"
53 #include "tree-dfa.h"
54 #include "tree-ssa.h"
55 #include "tree-pass.h"
56 #include "except.h"
57 #include "flags.h"
58 #include "diagnostic.h"
59 #include "gimple-pretty-print.h"
60 #include "toplev.h"
61 #include "debug.h"
62 #include "params.h"
63 #include "tree-inline.h"
64 #include "value-prof.h"
65 #include "target.h"
66 #include "tree-ssa-live.h"
67 #include "tree-outof-ssa.h"
68 #include "sbitmap.h"
69 #include "cfgloop.h"
70 #include "regs.h" /* For reg_renumber. */
71 #include "insn-attr.h" /* For INSN_SCHEDULING. */
72 #include "asan.h"
73 #include "tree-ssa-address.h"
74 #include "recog.h"
75 #include "output.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;
/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;
static rtx expand_debug_expr (tree);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt),
		gimple_assign_rhs2 (stmt),
		gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt),
		gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
	   && gimple_location (stmt) != EXPR_LOCATION (t))
	  || (gimple_block (stmt)
	      && currently_expanding_to_rtl
	      && EXPR_P (t)))
	t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
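
/* For illustration: SSAVAR applied to an SSA name such as i_3 yields
   its underlying VAR_DECL i (when it has one); applied to anything
   else it is the identity.  Several routines below use it to treat
   SSA names and plain decls uniformly.  */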
/* Associate declaration T with storage space X.  If T is not
   an SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
	set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
	 doesn't run) record the place also in the base DECL if it's
	 a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
	{
	  tree var = SSA_NAME_VAR (t);
	  /* If we don't yet have something recorded, just record it now.  */
	  if (!DECL_RTL_SET_P (var))
	    SET_DECL_RTL (var, x);
	  /* If we have it set already to "multiple places" don't
	     change this.  */
	  else if (DECL_RTL (var) == pc_rtx)
	    ;
	  /* If we have something recorded and it's not the same place
	     as we want to record now, we have multiple partitions for the
	     same base variable, with different places.  We can't just
	     randomly choose one, hence we have to say that we don't know.
	     This only happens with optimization, and there var-tracking
	     will figure out the right thing.  */
	  else if (DECL_RTL (var) != x)
	    SET_DECL_RTL (var, pc_rtx);
	}
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The indices of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't honor given the expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
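
/* A worked example of the rounding above, assuming an upward-growing
   frame and frame_phase == 0: with frame_offset == 13, size == 4 and
   align == 8, new_frame_offset is rounded from 13 up to 16, the
   function returns offset 16, and frame_offset ends up at 20.  */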
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luids X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luids X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
	bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v =
	(size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
	{
	  size_t num = *v;
	  bitmap_iterator bi;
	  unsigned i;
	  gcc_assert (num < stack_vars_num);
	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
	    add_stack_var_conflict (num, i);
	}
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  size_t *v;
	  /* Nested function lowering might introduce LHSs
	     that are COMPONENT_REFs.  */
	  if (TREE_CODE (lhs) != VAR_DECL)
	    continue;
	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
	      && (v = (size_t *)
		  pointer_map_contains (decl_to_stack_part, lhs)))
	    bitmap_clear_bit (work, *v);
	}
      else if (!is_gimple_debug (stmt))
	{
	  if (for_conflict
	      && visit == visit_op)
	    {
	      /* If this is the first real instruction in this BB we need
		 to add conflicts for everything live at this point now.
		 Unlike classical liveness for named objects we can't
		 rely on seeing a def/use of the names we're interested in.
		 There might merely be indirect loads/stores.  We'd not add any
		 conflicts for such partitions.  */
	      bitmap_iterator bi;
	      unsigned i;
	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
		{
		  struct stack_var *a = &stack_vars[i];
		  if (!a->conflicts)
		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
		  bitmap_ior_into (a->conflicts, work);
		}
	      visit = visit_conflict;
	    }
	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
	}
    }
}
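
/* A sketch of the liveness scheme above: a partitioned decl becomes
   live at its first mention and is killed by the end-of-scope clobber
   that gimplify emits.  So for "{ char a[8]; use (a); } { char b[8];
   use (b); }" the clobber of `a' ends its range before `b' starts;
   the two never appear live together and may share a stack slot.  */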
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward, without also moving a dereference
     of it upward.  But it's conservatively correct as a variable can
     never hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
	{
	  bitmap active;
	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
	  active = (bitmap)bb->aux;
	  add_scope_conflicts_1 (bb, work, false);
	  if (bitmap_ior_into (active, work))
	    changed = true;
	}
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, increasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, decreasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
	return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
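
/* Illustrative sort order under this comparator, assuming a target
   where MAX_SUPPORTED_STACK_ALIGNMENT is 256 bits: a 16-byte variable
   requiring 512-bit alignment sorts first (it is "large"), then a
   64-byte variable, then 16-byte variables, ties broken by alignment
   and finally by ID for stability.  */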
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
			       struct pointer_map_t *decls_to_partitions,
			       struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
	 visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
	 || !bitmap_bit_p (temp, i))
	&& (part = (bitmap *) pointer_map_contains (decls_to_partitions,
						    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
	  || stack_vars[i].next == EOC)
	continue;

      if (!decls_to_partitions)
	{
	  decls_to_partitions = pointer_map_create ();
	  cfun->gimple_df->decls_to_pointers = pointer_map_create ();
	}

      /* Create an SSA_NAME that points to the partition for use
	 as base during alias-oracle queries on RTL for bases that
	 have been partitioned.  */
      if (var == NULL_TREE)
	var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
	 points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  tree decl = stack_vars[j].decl;
	  unsigned int uid = DECL_PT_UID (decl);
	  bitmap_set_bit (part, uid);
	  *((bitmap *) pointer_map_insert (decls_to_partitions,
					   (void *)(size_t) uid)) = part;
	  *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
					 decl)) = name;
	  if (TREE_ADDRESSABLE (decl))
	    TREE_ADDRESSABLE (name) = 1;
	}

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  struct ptr_info_def *pi;

	  if (name
	      && POINTER_TYPE_P (TREE_TYPE (name))
	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
					   visited, temp);
	}

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
				     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
	add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
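
/* For example, if partition A is currently the list A -> C -> EOC and
   B is a singleton, union_stack_vars (A, B) splices B in right after
   the representative, giving A -> B -> C -> EOC, with B's
   representative field now pointing at A.  */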
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size in descending order.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	  }
	}
*/
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
	 see a var that is not a partition representative, it must
	 have been merged earlier.  */
      if (stack_vars[i].representative != i)
	continue;

      for (sj = si + 1; sj < n; ++sj)
	{
	  size_t j = stack_vars_sorted[sj];
	  unsigned int jalign = stack_vars[j].alignb;
	  HOST_WIDE_INT jsize = stack_vars[j].size;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Do not mix objects of "small" (supported) alignment
	     and "large" (unsupported) alignment.  */
	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
	    break;

	  /* For Address Sanitizer do not mix objects with different
	     sizes, as the shorter vars wouldn't be adequately protected.
	     Don't do that for "large" (unsupported) alignment objects,
	     those aren't protected anyway.  */
	  if ((flag_sanitize & SANITIZE_ADDRESS) && isize != jsize
	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    break;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* UNION the objects; J becomes part of I's partition.  */
	  union_stack_vars (i, j);
	}
    }

  update_alias_info_with_stack_vars ();
}
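
/* Worked example (without -fsanitize=address, which refuses to mix
   sizes): given vars of sizes 32, 24 and 8 where only the 32- and
   24-byte vars conflict, the sorted scan merges the 8-byte var into
   the 32-byte var's partition and leaves the 24-byte var alone --
   two stack slots instead of three.  */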
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
	       " align %u\n", (unsigned long) i, stack_vars[i].size,
	       stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	}
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
			 HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
	 If it is we generate stack slots only accidentally so it isn't as
	 important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
	offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
	align = base_align;

      /* One would think that we could assert that we're not decreasing
	 alignment here, but (at least) the i386 port does exactly this
	 via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
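
/* Note on the alignment computation above: `offset & -offset' isolates
   the lowest set bit of the offset, i.e. the largest power of two the
   offset is a multiple of.  For example, offset 24 (binary 11000)
   gives 24 & -24 == 8, so only 8-byte alignment can be deduced from
   the offset alone.  */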
struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed order; highest-offset pairs come first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
	{
	  unsigned alignb;

	  i = stack_vars_sorted[si];
	  alignb = stack_vars[i].alignb;

	  /* Stop when we get to the first decl with "small" alignment.  */
	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    break;

	  /* Skip variables that aren't partition representatives.  */
	  if (stack_vars[i].representative != i)
	    continue;

	  /* Skip variables that have already had rtl assigned.  See also
	     add_stack_var where we perpetrate this pc_rtx hack.  */
	  decl = stack_vars[i].decl;
	  if ((TREE_CODE (decl) == SSA_NAME
	       ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
	       : DECL_RTL (decl)) != pc_rtx)
	    continue;

	  large_size += alignb - 1;
	  large_size &= -(HOST_WIDE_INT)alignb;
	  large_size += stack_vars[i].size;
	}

      /* If there were any, allocate space.  */
      if (large_size > 0)
	large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
						   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
	   ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
	   : DECL_RTL (decl)) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (i))
	continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	{
	  base = virtual_stack_vars_rtx;
	  if ((flag_sanitize & SANITIZE_ADDRESS) && pred)
	    {
	      HOST_WIDE_INT prev_offset = frame_offset;
	      tree repr_decl = NULL_TREE;

	      offset
		= alloc_stack_frame_space (stack_vars[i].size
					   + ASAN_RED_ZONE_SIZE,
					   MAX (alignb, ASAN_RED_ZONE_SIZE));
	      data->asan_vec.safe_push (prev_offset);
	      data->asan_vec.safe_push (offset + stack_vars[i].size);
	      /* Find best representative of the partition.
		 Prefer those with DECL_NAME, even better
		 satisfying asan_protect_stack_decl predicate.  */
	      for (j = i; j != EOC; j = stack_vars[j].next)
		if (asan_protect_stack_decl (stack_vars[j].decl)
		    && DECL_NAME (stack_vars[j].decl))
		  {
		    repr_decl = stack_vars[j].decl;
		    break;
		  }
		else if (repr_decl == NULL_TREE
			 && DECL_P (stack_vars[j].decl)
			 && DECL_NAME (stack_vars[j].decl))
		  repr_decl = stack_vars[j].decl;
	      if (repr_decl == NULL_TREE)
		repr_decl = stack_vars[i].decl;
	      data->asan_decl_vec.safe_push (repr_decl);
	      data->asan_alignb = MAX (data->asan_alignb, alignb);
	      if (data->asan_base == NULL)
		data->asan_base = gen_reg_rtx (Pmode);
	      base = data->asan_base;
	    }
	  else
	    offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
	  base_align = crtl->max_used_stack_slot_alignment;
	}
      else
	{
	  /* Large alignment is only processed in the last pass.  */
	  if (pred)
	    continue;
	  gcc_assert (large_base != NULL);

	  large_alloc += alignb - 1;
	  large_alloc &= -(HOST_WIDE_INT)alignb;
	  offset = large_alloc;
	  large_alloc += stack_vars[i].size;

	  base = large_base;
	  base_align = large_align;
	}

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	expand_one_stack_var_at (stack_vars[j].decl,
				 base, base_align,
				 offset);
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
	set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
			   crtl->max_used_stack_slot_alignment, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || (flag_sanitize & SANITIZE_ADDRESS))
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the amount of stack space this variable is supposed to use.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
	 we conservatively assume it will be on stack even if VAR is
	 eventually put into register after RA pass.  For non-automatic
	 variables, which won't be on stack, we collect alignment of
	 type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
				   TYPE_MODE (TREE_TYPE (var)),
				   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
	   or variables which were assigned a stack slot already by
	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
	   changed from the offset chosen to it.  */
	align = crtl->stack_alignment_estimated;
      else
	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
	 it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
	align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
	 realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
		  || (!DECL_EXTERNAL (var)
		      && !DECL_HAS_VALUE_EXPR_P (var)
		      && !TREE_STATIC (var)
		      && TREE_TYPE (var) != error_mark_node
		      && !DECL_HARD_REGISTER (var)
		      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
	expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
	expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
	expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
	{
	  error ("size of variable %q+D is too large", var);
	  expand_one_error_var (var);
	}
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
	expand_one_stack_var (origvar);
      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
	&& ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
	    || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
	|| !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3
};
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
	    len = max;
	  else
	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
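
/* Classification examples, assuming the default --param ssp-buffer-size=8:
   "char buf[64]" yields SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "char buf[4]" yields SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, and
   a struct with an int array field yields
   SPCT_HAS_AGGREGATE | SPCT_HAS_ARRAY.  */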
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
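
/* In other words: under -fstack-protector-all or
   -fstack-protector-strong, a plain character array is phase 1, any
   other array-containing type phase 2, everything else phase 0; under
   plain -fstack-protector only large character arrays (phase 1) are
   segregated.  */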
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

/* A helper function that checks for the asan phase (with stack protector
   it is phase 3).  This is used as a callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
	return true;
      i = stack_vars[i].next;
    }
  return false;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			   VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = pointer_map_create ();

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
	stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}
/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
	tree field_type = TREE_TYPE (f);
	if (RECORD_OR_UNION_TYPE_P (field_type)
	    && record_or_union_type_has_array_p (field_type))
	  return 1;
	if (TREE_CODE (field_type) == ARRAY_TYPE)
	  return 1;
      }
  return 0;
}
/* Expand all variables used in the function.  */

static rtx
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  vec<tree> maybe_local_decls = vNULL;
  rtx var_end_seq = NULL_RTX;
  struct pointer_map_t *ssa_name_decls;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
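
  /* For illustration: with a 128-bit PREFERRED_STACK_BOUNDARY (align
     == 16 bytes) and STARTING_FRAME_OFFSET == 8, off is 8 and
     frame_phase becomes 8, the misalignment that
     alloc_stack_frame_space compensates for when rounding offsets.  */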
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  ssa_name_decls = pointer_map_create ();
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      /* Assign decls to each SSA name partition, share decls for partitions
	 we could have coalesced (those with the same type).  */
      if (SSA_NAME_VAR (var) == NULL_TREE)
	{
	  void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
	  if (!*slot)
	    *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
	  replace_ssa_name_symbol (var, (tree) *slot);
	}

      /* Always allocate space for partitions based on VAR_DECLs.  But for
	 those based on PARM_DECLs or RESULT_DECLs and which matter for the
	 debug info, there is no need to do so if optimization is disabled
	 because all the SSA_NAMEs based on these DECLs have been coalesced
	 into a single partition, which is thus assigned the canonical RTL
	 location of the DECLs.  */
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
	expand_one_var (var, true, true);
      else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize)
	{
	  /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
	     contain the default def (representing the parm or result itself)
	     we don't do anything here.  But those which don't contain the
	     default def (representing a temporary based on the parm/result)
	     we need to allocate space just like for normal VAR_DECLs.  */
	  if (!bitmap_bit_p (SA.partition_has_default_def, i))
	    {
	      expand_one_var (var, true, true);
	      gcc_assert (SA.partition_to_pseudo[i]);
	    }
	}
    }
  pointer_map_destroy (ssa_name_decls);
  if (flag_stack_protect == SPCT_FLAG_STRONG)
    FOR_EACH_LOCAL_DECL (cfun, i, var)
      if (!is_global_var (var))
	{
	  tree var_type = TREE_TYPE (var);
	  /* Examine local referenced variables that have their addresses
	     taken, contain an array, or are arrays.  */
	  if (TREE_CODE (var) == VAR_DECL
	      && (TREE_CODE (var_type) == ARRAY_TYPE
		  || TREE_ADDRESSABLE (var)
		  || (RECORD_OR_UNION_TYPE_P (var_type)
		      && record_or_union_type_has_array_p (var_type))))
	    {
	      gen_stack_protect_signal = true;
	      break;
	    }
	}
  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
	{
	  TREE_USED (var) = 0;
	  goto next;
	}
      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	expand_now = true;

      /* Expand variables not associated with any block now.  Those created by
	 the optimizers could be live anywhere in the function.  Those that
	 could possibly have been scoped originally and detached from their
	 block will have their allocation deferred so we coalesce them with
	 others when optimization is enabled.  */
      else if (TREE_USED (var))
	expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
	expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
	{
	  rtx rtl = DECL_RTL_IF_SET (var);

	  /* Keep artificial non-ignored vars in cfun->local_decls
	     chain until instantiate_decls.  */
	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
	    add_local_decl (cfun, var);
	  else if (rtl == NULL_RTX)
	    /* If rtl isn't set yet, which can happen e.g. with
	       -fstack-protector, retry before returning from this
	       function.  */
	    maybe_local_decls.safe_push (var);
	}
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);
  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
	  || cfun->calls_alloca || has_protected_decls)
	create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls)
	create_stack_guard ();
      break;

    default:
      ;
    }
  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      struct stack_vars_data data;

      data.asan_vec = vNULL;
      data.asan_decl_vec = vNULL;
      data.asan_base = NULL_RTX;
      data.asan_alignb = 0;

      /* Reorder decls to be protected by iterating over the variables
	 array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
	 earlier, such that we naturally see these variables first,
	 and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
	{
	  /* Phase 1 contains only character arrays.  */
	  expand_stack_vars (stack_protect_decl_phase_1, &data);

	  /* Phase 2 contains other kinds of arrays.  */
	  if (flag_stack_protect == 2)
	    expand_stack_vars (stack_protect_decl_phase_2, &data);
	}

      if (flag_sanitize & SANITIZE_ADDRESS)
	/* Phase 3, any partitions that need asan protection
	   in addition to phase 1 and 2.  */
	expand_stack_vars (asan_decl_phase_3, &data);

      if (!data.asan_vec.is_empty ())
	{
	  HOST_WIDE_INT prev_offset = frame_offset;
	  HOST_WIDE_INT offset, sz, redzonesz;
	  redzonesz = ASAN_RED_ZONE_SIZE;
	  sz = data.asan_vec[0] - prev_offset;
	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
	      && data.asan_alignb <= 4096
	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
	  offset
	    = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
	  data.asan_vec.safe_push (prev_offset);
	  data.asan_vec.safe_push (offset);

	  var_end_seq
	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
					  data.asan_base,
					  data.asan_alignb,
					  data.asan_vec.address (),
					  data.asan_decl_vec.address (),
					  data.asan_vec.length ());
	}

      expand_stack_vars (NULL, &data);

      data.asan_vec.release ();
      data.asan_decl_vec.release ();
    }
1859 fini_vars_expansion ();
1861 /* If there were any artificial non-ignored vars without rtl
1862 found earlier, see if deferred stack allocation hasn't assigned
1863 rtl to them. */
1864 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
1866 rtx rtl = DECL_RTL_IF_SET (var);
1868 /* Keep artificial non-ignored vars in cfun->local_decls
1869 chain until instantiate_decls. */
1870 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1871 add_local_decl (cfun, var);
1873 maybe_local_decls.release ();
1875 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1876 if (STACK_ALIGNMENT_NEEDED)
1878 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1879 if (!FRAME_GROWS_DOWNWARD)
1880 frame_offset += align - 1;
1881 frame_offset &= -align;
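/* E.g., assuming align == 16 and a downward-growing frame,
   frame_offset == -20 becomes -20 & -16 == -32, growing the
   frame to the next 16-byte boundary.  */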
1884 return var_end_seq;
1888 /* If we need to produce a detailed dump, print the tree representation
1889 for STMT to the dump file. SINCE is the last RTX after which the RTL
1890 generated for STMT should have been appended. */
1892 static void
1893 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1895 if (dump_file && (dump_flags & TDF_DETAILS))
1897 fprintf (dump_file, "\n;; ");
1898 print_gimple_stmt (dump_file, stmt, 0,
1899 TDF_SLIM | (dump_flags & TDF_LINENO));
1900 fprintf (dump_file, "\n");
1902 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1906 /* Maps the blocks that do not contain tree labels to rtx labels. */
1908 static struct pointer_map_t *lab_rtx_for_bb;
1910 /* Returns the label_rtx expression for a label starting basic block BB. */
1912 static rtx
1913 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1915 gimple_stmt_iterator gsi;
1916 tree lab;
1917 gimple lab_stmt;
1918 void **elt;
1920 if (bb->flags & BB_RTL)
1921 return block_label (bb);
1923 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1924 if (elt)
1925 return (rtx) *elt;
1927 /* Find the tree label if it is present. */
1929 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1931 lab_stmt = gsi_stmt (gsi);
1932 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1933 break;
1935 lab = gimple_label_label (lab_stmt);
1936 if (DECL_NONLOCAL (lab))
1937 break;
1939 return label_rtx (lab);
1942 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1943 *elt = gen_label_rtx ();
1944 return (rtx) *elt;
1948 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1949 of a basic block where we just expanded the conditional at the end,
1950 possibly clean up the CFG and instruction sequence. LAST is the
1951 last instruction before the just emitted jump sequence. */
1953 static void
1954 maybe_cleanup_end_of_block (edge e, rtx last)
1956 /* Special case: when jumpif decides that the condition is
1957 trivial it emits an unconditional jump (and the necessary
1958 barrier). But we still have two edges, and the fallthru one is
1959 wrong. purge_dead_edges would clean this up later. Unfortunately
1960 we have to insert insns (and split edges) before
1961 find_many_sub_basic_blocks and hence before purge_dead_edges.
1962 But splitting edges might create new blocks which depend on the
1963 fact that if there are two edges there's no barrier. So the
1964 barrier would get lost and verify_flow_info would ICE. Instead
1965 of auditing all edge splitters to care for the barrier (which
1966 normally isn't there in a cleaned CFG), fix it here. */
1967 if (BARRIER_P (get_last_insn ()))
1969 rtx insn;
1970 remove_edge (e);
1971 /* Now we have a single successor block. If we have insns to
1972 insert on the remaining edge, we will potentially insert
1973 them at the end of this block (if the dest block isn't feasible)
1974 in order to avoid splitting the edge. This insertion will take
1975 place in front of the last jump. But we might have emitted
1976 multiple jumps (conditional and one unconditional) to the
1977 same destination. Inserting in front of the last one then
1978 is a problem. See PR 40021. We fix this by deleting all
1979 jumps except the last unconditional one. */
1980 insn = PREV_INSN (get_last_insn ());
1981 /* Make sure we have an unconditional jump. Otherwise we're
1982 confused. */
1983 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1984 for (insn = PREV_INSN (insn); insn != last;)
1986 insn = PREV_INSN (insn);
1987 if (JUMP_P (NEXT_INSN (insn)))
1989 if (!any_condjump_p (NEXT_INSN (insn)))
1991 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
1992 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
1994 delete_insn (NEXT_INSN (insn));
2000 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2001 Returns a new basic block if we've terminated the current basic
2002 block and created a new one. */
2004 static basic_block
2005 expand_gimple_cond (basic_block bb, gimple stmt)
2007 basic_block new_bb, dest;
2008 edge new_edge;
2009 edge true_edge;
2010 edge false_edge;
2011 rtx last2, last;
2012 enum tree_code code;
2013 tree op0, op1;
2015 code = gimple_cond_code (stmt);
2016 op0 = gimple_cond_lhs (stmt);
2017 op1 = gimple_cond_rhs (stmt);
2018 /* We're sometimes presented with such code:
2019 D.123_1 = x < y;
2020 if (D.123_1 != 0)
2022 This would expand to two comparisons which then later might
2023 be cleaned up by combine. But some pattern matchers like if-conversion
2024 work better when there's only one compare, so make up for this
2025 here as a special exception if TER would have made the same change. */
2026 if (SA.values
2027 && TREE_CODE (op0) == SSA_NAME
2028 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2029 && TREE_CODE (op1) == INTEGER_CST
2030 && ((gimple_cond_code (stmt) == NE_EXPR
2031 && integer_zerop (op1))
2032 || (gimple_cond_code (stmt) == EQ_EXPR
2033 && integer_onep (op1)))
2034 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2036 gimple second = SSA_NAME_DEF_STMT (op0);
2037 if (gimple_code (second) == GIMPLE_ASSIGN)
2039 enum tree_code code2 = gimple_assign_rhs_code (second);
2040 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2042 code = code2;
2043 op0 = gimple_assign_rhs1 (second);
2044 op1 = gimple_assign_rhs2 (second);
2046 /* If jumps are cheap turn some more codes into
2047 jumpy sequences. */
2048 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
2050 if ((code2 == BIT_AND_EXPR
2051 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2052 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2053 || code2 == TRUTH_AND_EXPR)
2055 code = TRUTH_ANDIF_EXPR;
2056 op0 = gimple_assign_rhs1 (second);
2057 op1 = gimple_assign_rhs2 (second);
2059 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2061 code = TRUTH_ORIF_EXPR;
2062 op0 = gimple_assign_rhs1 (second);
2063 op1 = gimple_assign_rhs2 (second);
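/* As an illustration (hypothetical GIMPLE): for single-bit
   operands, "tem_2 = a_1 & b_3; if (tem_2 != 0)" is expanded
   here as TRUTH_ANDIF_EXPR, i.e. two cheap branches instead of
   materializing the bitwise result in a register; BIT_IOR_EXPR
   likewise becomes TRUTH_ORIF_EXPR.  */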
2069 last2 = last = get_last_insn ();
2071 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2072 set_curr_insn_location (gimple_location (stmt));
2074 /* These flags have no purpose in RTL land. */
2075 true_edge->flags &= ~EDGE_TRUE_VALUE;
2076 false_edge->flags &= ~EDGE_FALSE_VALUE;
2078 /* We can either have a pure conditional jump with one fallthru edge or
2079 a two-way jump that needs to be decomposed into two basic blocks. */
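/* Three cases follow: if the false edge falls through to the next
   block we emit only the jump for the true edge; if the true edge
   falls through we emit the inverted jump; otherwise neither edge
   falls through, so we emit a conditional jump plus an unconditional
   one and split off a new basic block.  */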
2080 if (false_edge->dest == bb->next_bb)
2082 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2083 true_edge->probability);
2084 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2085 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2086 set_curr_insn_location (true_edge->goto_locus);
2087 false_edge->flags |= EDGE_FALLTHRU;
2088 maybe_cleanup_end_of_block (false_edge, last);
2089 return NULL;
2091 if (true_edge->dest == bb->next_bb)
2093 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2094 false_edge->probability);
2095 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2096 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2097 set_curr_insn_location (false_edge->goto_locus);
2098 true_edge->flags |= EDGE_FALLTHRU;
2099 maybe_cleanup_end_of_block (true_edge, last);
2100 return NULL;
2103 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2104 true_edge->probability);
2105 last = get_last_insn ();
2106 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2107 set_curr_insn_location (false_edge->goto_locus);
2108 emit_jump (label_rtx_for_bb (false_edge->dest));
2110 BB_END (bb) = last;
2111 if (BARRIER_P (BB_END (bb)))
2112 BB_END (bb) = PREV_INSN (BB_END (bb));
2113 update_bb_for_insn (bb);
2115 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2116 dest = false_edge->dest;
2117 redirect_edge_succ (false_edge, new_bb);
2118 false_edge->flags |= EDGE_FALLTHRU;
2119 new_bb->count = false_edge->count;
2120 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2121 if (current_loops && bb->loop_father)
2122 add_bb_to_loop (new_bb, bb->loop_father);
2123 new_edge = make_edge (new_bb, dest, 0);
2124 new_edge->probability = REG_BR_PROB_BASE;
2125 new_edge->count = new_bb->count;
2126 if (BARRIER_P (BB_END (new_bb)))
2127 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2128 update_bb_for_insn (new_bb);
2130 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2132 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2134 set_curr_insn_location (true_edge->goto_locus);
2135 true_edge->goto_locus = curr_insn_location ();
2138 return new_bb;
2141 /* Mark all calls that can have a transaction restart. */
2143 static void
2144 mark_transaction_restart_calls (gimple stmt)
2146 struct tm_restart_node dummy;
2147 void **slot;
2149 if (!cfun->gimple_df->tm_restart)
2150 return;
2152 dummy.stmt = stmt;
2153 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2154 if (slot)
2156 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2157 tree list = n->label_or_list;
2158 rtx insn;
2160 for (insn = next_real_insn (get_last_insn ());
2161 !CALL_P (insn);
2162 insn = next_real_insn (insn))
2163 continue;
2165 if (TREE_CODE (list) == LABEL_DECL)
2166 add_reg_note (insn, REG_TM, label_rtx (list));
2167 else
2168 for (; list ; list = TREE_CHAIN (list))
2169 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2173 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2174 statement STMT. */
2176 static void
2177 expand_call_stmt (gimple stmt)
2179 tree exp, decl, lhs;
2180 bool builtin_p;
2181 size_t i;
2183 if (gimple_call_internal_p (stmt))
2185 expand_internal_call (stmt);
2186 return;
2189 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2191 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2192 decl = gimple_call_fndecl (stmt);
2193 builtin_p = decl && DECL_BUILT_IN (decl);
2195 /* If this is not a builtin function, the function type through which the
2196 call is made may be different from the type of the function. */
2197 if (!builtin_p)
2198 CALL_EXPR_FN (exp)
2199 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2200 CALL_EXPR_FN (exp));
2202 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2203 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2205 for (i = 0; i < gimple_call_num_args (stmt); i++)
2207 tree arg = gimple_call_arg (stmt, i);
2208 gimple def;
2209 /* TER forwards ADDR_EXPRs into arguments of builtin functions so we have a
2210 chance to infer more correct alignment information. See PR39954. */
2211 if (builtin_p
2212 && TREE_CODE (arg) == SSA_NAME
2213 && (def = get_gimple_for_ssa_name (arg))
2214 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2215 arg = gimple_assign_rhs1 (def);
2216 CALL_EXPR_ARG (exp, i) = arg;
2219 if (gimple_has_side_effects (stmt))
2220 TREE_SIDE_EFFECTS (exp) = 1;
2222 if (gimple_call_nothrow_p (stmt))
2223 TREE_NOTHROW (exp) = 1;
2225 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2226 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2227 if (decl
2228 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2229 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2230 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2231 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2232 else
2233 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2234 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2235 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2237 /* Ensure RTL is created for debug args. */
2238 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2240 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2241 unsigned int ix;
2242 tree dtemp;
2244 if (debug_args)
2245 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2247 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2248 expand_debug_expr (dtemp);
2252 lhs = gimple_call_lhs (stmt);
2253 if (lhs)
2254 expand_assignment (lhs, exp, false);
2255 else
2256 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2258 mark_transaction_restart_calls (stmt);
2262 /* Generate RTL for an asm statement (explicit assembler code).
2263 STRING is a STRING_CST node containing the assembler code text,
2264 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2265 insn is volatile; don't optimize it. */
2267 static void
2268 expand_asm_loc (tree string, int vol, location_t locus)
2270 rtx body;
2272 if (TREE_CODE (string) == ADDR_EXPR)
2273 string = TREE_OPERAND (string, 0);
2275 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2276 ggc_strdup (TREE_STRING_POINTER (string)),
2277 locus);
2279 MEM_VOLATILE_P (body) = vol;
2281 emit_insn (body);
2284 /* Return the number of times character C occurs in string S. */
2285 static int
2286 n_occurrences (int c, const char *s)
2288 int n = 0;
2289 while (*s)
2290 n += (*s++ == c);
2291 return n;
2294 /* A subroutine of expand_asm_operands. Check that all operands have
2295 the same number of alternatives. Return true if so. */
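/* E.g. (hypothetical constraint strings) "=r,m" and "g,o" each
   contain one ',' and thus describe two alternatives; mixing in a
   plain "r", with zero commas, would trigger the error below.  */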
2297 static bool
2298 check_operand_nalternatives (tree outputs, tree inputs)
2300 if (outputs || inputs)
2302 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2303 int nalternatives
2304 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2305 tree next = inputs;
2307 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2309 error ("too many alternatives in %<asm%>");
2310 return false;
2313 tmp = outputs;
2314 while (tmp)
2316 const char *constraint
2317 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2319 if (n_occurrences (',', constraint) != nalternatives)
2321 error ("operand constraints for %<asm%> differ "
2322 "in number of alternatives");
2323 return false;
2326 if (TREE_CHAIN (tmp))
2327 tmp = TREE_CHAIN (tmp);
2328 else
2329 tmp = next, next = 0;
2333 return true;
2336 /* Check for overlap between registers marked in CLOBBERED_REGS and
2337 anything inappropriate in T. Emit an error and return TRUE
2338 if an overlap is found, FALSE for ok. */
2340 static bool
2341 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2343 /* Conflicts between asm-declared register variables and the clobber
2344 list are not allowed. */
2345 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2347 if (overlap)
2349 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2350 DECL_NAME (overlap));
2352 /* Reset registerness to stop multiple errors emitted for a single
2353 variable. */
2354 DECL_REGISTER (overlap) = 0;
2355 return true;
2358 return false;
2361 /* Generate RTL for an asm statement with arguments.
2362 STRING is the instruction template.
2363 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2364 Each output or input has an expression in the TREE_VALUE and
2365 a tree list in TREE_PURPOSE which in turn contains a constraint
2366 name in TREE_VALUE (or NULL_TREE) and a constraint string
2367 in TREE_PURPOSE.
2368 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2369 that is clobbered by this insn.
2371 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2372 should be the fallthru basic block of the asm goto.
2374 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2375 Some elements of OUTPUTS may be replaced with trees representing temporary
2376 values. The caller should copy those temporary values to the originally
2377 specified lvalues.
2379 VOL nonzero means the insn is volatile; don't optimize it. */
2381 static void
2382 expand_asm_operands (tree string, tree outputs, tree inputs,
2383 tree clobbers, tree labels, basic_block fallthru_bb,
2384 int vol, location_t locus)
2386 rtvec argvec, constraintvec, labelvec;
2387 rtx body;
2388 int ninputs = list_length (inputs);
2389 int noutputs = list_length (outputs);
2390 int nlabels = list_length (labels);
2391 int ninout;
2392 int nclobbers;
2393 HARD_REG_SET clobbered_regs;
2394 int clobber_conflict_found = 0;
2395 tree tail;
2396 tree t;
2397 int i;
2398 /* Vector of RTX's of evaluated output operands. */
2399 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2400 int *inout_opnum = XALLOCAVEC (int, noutputs);
2401 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2402 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
2403 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2404 int old_generating_concat_p = generating_concat_p;
2405 rtx fallthru_label = NULL_RTX;
2407 /* An ASM with no outputs needs to be treated as volatile, for now. */
2408 if (noutputs == 0)
2409 vol = 1;
2411 if (! check_operand_nalternatives (outputs, inputs))
2412 return;
2414 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2416 /* Collect constraints. */
2417 i = 0;
2418 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2419 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2420 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2421 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2423 /* Sometimes we wish to automatically clobber registers across an asm.
2424 Case in point is when the i386 backend moved from cc0 to a hard reg --
2425 maintaining source-level compatibility means automatically clobbering
2426 the flags register. */
2427 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2429 /* Count the number of meaningful clobbered registers, ignoring what
2430 we would ignore later. */
2431 nclobbers = 0;
2432 CLEAR_HARD_REG_SET (clobbered_regs);
2433 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2435 const char *regname;
2436 int nregs;
2438 if (TREE_VALUE (tail) == error_mark_node)
2439 return;
2440 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2442 i = decode_reg_name_and_count (regname, &nregs);
2443 if (i == -4)
2444 ++nclobbers;
2445 else if (i == -2)
2446 error ("unknown register name %qs in %<asm%>", regname);
2448 /* Mark clobbered registers. */
2449 if (i >= 0)
2451 int reg;
2453 for (reg = i; reg < i + nregs; reg++)
2455 ++nclobbers;
2457 /* Clobbering the PIC register is an error. */
2458 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2460 error ("PIC register clobbered by %qs in %<asm%>", regname);
2461 return;
2464 SET_HARD_REG_BIT (clobbered_regs, reg);
2469 /* First pass over inputs and outputs checks validity and sets
2470 mark_addressable if needed. */
2472 ninout = 0;
2473 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2475 tree val = TREE_VALUE (tail);
2476 tree type = TREE_TYPE (val);
2477 const char *constraint;
2478 bool is_inout;
2479 bool allows_reg;
2480 bool allows_mem;
2482 /* If there's an erroneous arg, emit no insn. */
2483 if (type == error_mark_node)
2484 return;
2486 /* Try to parse the output constraint. If that fails, there's
2487 no point in going further. */
2488 constraint = constraints[i];
2489 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2490 &allows_mem, &allows_reg, &is_inout))
2491 return;
2493 if (! allows_reg
2494 && (allows_mem
2495 || is_inout
2496 || (DECL_P (val)
2497 && REG_P (DECL_RTL (val))
2498 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2499 mark_addressable (val);
2501 if (is_inout)
2502 ninout++;
2505 ninputs += ninout;
2506 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
2508 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2509 return;
2512 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2514 bool allows_reg, allows_mem;
2515 const char *constraint;
2517 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2518 would get VOIDmode and that could cause a crash in reload. */
2519 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2520 return;
2522 constraint = constraints[i + noutputs];
2523 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2524 constraints, &allows_mem, &allows_reg))
2525 return;
2527 if (! allows_reg && allows_mem)
2528 mark_addressable (TREE_VALUE (tail));
2531 /* Second pass evaluates arguments. */
2533 /* Make sure stack is consistent for asm goto. */
2534 if (nlabels > 0)
2535 do_pending_stack_adjust ();
2537 ninout = 0;
2538 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2540 tree val = TREE_VALUE (tail);
2541 tree type = TREE_TYPE (val);
2542 bool is_inout;
2543 bool allows_reg;
2544 bool allows_mem;
2545 rtx op;
2546 bool ok;
2548 ok = parse_output_constraint (&constraints[i], i, ninputs,
2549 noutputs, &allows_mem, &allows_reg,
2550 &is_inout);
2551 gcc_assert (ok);
2553 /* If an output operand is not a decl or indirect ref and our constraint
2554 allows a register, make a temporary to act as an intermediate.
2555 Make the asm insn write into that, then our caller will copy it to
2556 the real output operand. Likewise for promoted variables. */
2558 generating_concat_p = 0;
2560 real_output_rtx[i] = NULL_RTX;
2561 if ((TREE_CODE (val) == INDIRECT_REF
2562 && allows_mem)
2563 || (DECL_P (val)
2564 && (allows_mem || REG_P (DECL_RTL (val)))
2565 && ! (REG_P (DECL_RTL (val))
2566 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2567 || ! allows_reg
2568 || is_inout)
2570 op = expand_expr (val, NULL_RTX, VOIDmode,
2571 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2572 if (MEM_P (op))
2573 op = validize_mem (op);
2575 if (! allows_reg && !MEM_P (op))
2576 error ("output number %d not directly addressable", i);
2577 if ((! allows_mem && MEM_P (op))
2578 || GET_CODE (op) == CONCAT)
2580 real_output_rtx[i] = op;
2581 op = gen_reg_rtx (GET_MODE (op));
2582 if (is_inout)
2583 emit_move_insn (op, real_output_rtx[i]);
2586 else
2588 op = assign_temp (type, 0, 1);
2589 op = validize_mem (op);
2590 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2591 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2592 TREE_VALUE (tail) = make_tree (type, op);
2594 output_rtx[i] = op;
2596 generating_concat_p = old_generating_concat_p;
2598 if (is_inout)
2600 inout_mode[ninout] = TYPE_MODE (type);
2601 inout_opnum[ninout++] = i;
2604 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2605 clobber_conflict_found = 1;
2608 /* Make vectors for the expression-rtx, constraint strings,
2609 and named operands. */
2611 argvec = rtvec_alloc (ninputs);
2612 constraintvec = rtvec_alloc (ninputs);
2613 labelvec = rtvec_alloc (nlabels);
2615 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2616 : GET_MODE (output_rtx[0])),
2617 ggc_strdup (TREE_STRING_POINTER (string)),
2618 empty_string, 0, argvec, constraintvec,
2619 labelvec, locus);
2621 MEM_VOLATILE_P (body) = vol;
2623 /* Eval the inputs and put them into ARGVEC.
2624 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2626 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2628 bool allows_reg, allows_mem;
2629 const char *constraint;
2630 tree val, type;
2631 rtx op;
2632 bool ok;
2634 constraint = constraints[i + noutputs];
2635 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2636 constraints, &allows_mem, &allows_reg);
2637 gcc_assert (ok);
2639 generating_concat_p = 0;
2641 val = TREE_VALUE (tail);
2642 type = TREE_TYPE (val);
2643 /* EXPAND_INITIALIZER will not generate code for valid initializer
2644 constants, but will still generate code for other types of operand.
2645 This is the behavior we want for constant constraints. */
2646 op = expand_expr (val, NULL_RTX, VOIDmode,
2647 allows_reg ? EXPAND_NORMAL
2648 : allows_mem ? EXPAND_MEMORY
2649 : EXPAND_INITIALIZER);
2651 /* Never pass a CONCAT to an ASM. */
2652 if (GET_CODE (op) == CONCAT)
2653 op = force_reg (GET_MODE (op), op);
2654 else if (MEM_P (op))
2655 op = validize_mem (op);
2657 if (asm_operand_ok (op, constraint, NULL) <= 0)
2659 if (allows_reg && TYPE_MODE (type) != BLKmode)
2660 op = force_reg (TYPE_MODE (type), op);
2661 else if (!allows_mem)
2662 warning (0, "asm operand %d probably doesn%'t match constraints",
2663 i + noutputs);
2664 else if (MEM_P (op))
2666 /* We won't recognize either volatile memory or memory
2667 with a queued address as an available memory_operand
2668 at this point. Ignore it: clearly this *is* a memory. */
2670 else
2671 gcc_unreachable ();
2674 generating_concat_p = old_generating_concat_p;
2675 ASM_OPERANDS_INPUT (body, i) = op;
2677 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2678 = gen_rtx_ASM_INPUT (TYPE_MODE (type),
2679 ggc_strdup (constraints[i + noutputs]));
2681 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2682 clobber_conflict_found = 1;
2685 /* Protect all the operands from the queue now that they have all been
2686 evaluated. */
2688 generating_concat_p = 0;
2690 /* For in-out operands, copy output rtx to input rtx. */
2691 for (i = 0; i < ninout; i++)
2693 int j = inout_opnum[i];
2694 char buffer[16];
2696 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2697 = output_rtx[j];
2699 sprintf (buffer, "%d", j);
2700 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2701 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
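/* E.g. for a hypothetical "+r" output at operand position 0, the
   matching input built here gets the constraint string "0",
   telling reload to assign it the same register as output 0.  */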
2704 /* Copy labels to the vector. */
2705 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2707 rtx r;
2708 /* If asm goto has any labels in the fallthru basic block, use
2709 a label that we emit immediately after the asm goto. Expansion
2710 may insert further instructions into the same basic block after
2711 asm goto and if we don't do this, insertion of instructions on
2712 the fallthru edge might misbehave. See PR58670. */
2713 if (fallthru_bb
2714 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2716 if (fallthru_label == NULL_RTX)
2717 fallthru_label = gen_label_rtx ();
2718 r = fallthru_label;
2720 else
2721 r = label_rtx (TREE_VALUE (tail));
2722 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2725 generating_concat_p = old_generating_concat_p;
2727 /* Now, for each output, construct an rtx
2728 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2729 ARGVEC CONSTRAINTS OPNAMES))
2730 If there is more than one, put them inside a PARALLEL. */
2732 if (nlabels > 0 && nclobbers == 0)
2734 gcc_assert (noutputs == 0);
2735 emit_jump_insn (body);
2737 else if (noutputs == 0 && nclobbers == 0)
2739 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2740 emit_insn (body);
2742 else if (noutputs == 1 && nclobbers == 0)
2744 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2745 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2747 else
2749 rtx obody = body;
2750 int num = noutputs;
2752 if (num == 0)
2753 num = 1;
2755 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2757 /* For each output operand, store a SET. */
2758 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2760 XVECEXP (body, 0, i)
2761 = gen_rtx_SET (VOIDmode,
2762 output_rtx[i],
2763 gen_rtx_ASM_OPERANDS
2764 (GET_MODE (output_rtx[i]),
2765 ggc_strdup (TREE_STRING_POINTER (string)),
2766 ggc_strdup (constraints[i]),
2767 i, argvec, constraintvec, labelvec, locus));
2769 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2772 /* If there are no outputs (but there are some clobbers)
2773 store the bare ASM_OPERANDS into the PARALLEL. */
2775 if (i == 0)
2776 XVECEXP (body, 0, i++) = obody;
2778 /* Store (clobber REG) for each clobbered register specified. */
2780 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2782 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2783 int reg, nregs;
2784 int j = decode_reg_name_and_count (regname, &nregs);
2785 rtx clobbered_reg;
2787 if (j < 0)
2789 if (j == -3) /* `cc', which is not a register */
2790 continue;
2792 if (j == -4) /* `memory', don't cache memory across asm */
2794 XVECEXP (body, 0, i++)
2795 = gen_rtx_CLOBBER (VOIDmode,
2796 gen_rtx_MEM
2797 (BLKmode,
2798 gen_rtx_SCRATCH (VOIDmode)));
2799 continue;
2802 /* Ignore unknown register, error already signaled. */
2803 continue;
2806 for (reg = j; reg < j + nregs; reg++)
2808 /* Use QImode since that's guaranteed to clobber just
2809 * one reg. */
2810 clobbered_reg = gen_rtx_REG (QImode, reg);
2812 /* Sanity check for overlap between clobbers and
2813 inputs or outputs that hasn't already been
2814 handled. Such overlap should have been detected and
2815 reported above. */
2816 if (!clobber_conflict_found)
2818 int opno;
2820 /* We test the old body (obody) contents to avoid
2821 tripping over the under-construction body. */
2822 for (opno = 0; opno < noutputs; opno++)
2823 if (reg_overlap_mentioned_p (clobbered_reg,
2824 output_rtx[opno]))
2825 internal_error
2826 ("asm clobber conflict with output operand");
2828 for (opno = 0; opno < ninputs - ninout; opno++)
2829 if (reg_overlap_mentioned_p (clobbered_reg,
2830 ASM_OPERANDS_INPUT (obody,
2831 opno)))
2832 internal_error
2833 ("asm clobber conflict with input operand");
2836 XVECEXP (body, 0, i++)
2837 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2841 if (nlabels > 0)
2842 emit_jump_insn (body);
2843 else
2844 emit_insn (body);
2847 if (fallthru_label)
2848 emit_label (fallthru_label);
2850 /* For any outputs that needed reloading into registers, spill them
2851 back to where they belong. */
2852 for (i = 0; i < noutputs; ++i)
2853 if (real_output_rtx[i])
2854 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2856 crtl->has_asm_statement = 1;
2857 free_temp_slots ();
2861 static void
2862 expand_asm_stmt (gimple stmt)
2864 int noutputs;
2865 tree outputs, tail, t;
2866 tree *o;
2867 size_t i, n;
2868 const char *s;
2869 tree str, out, in, cl, labels;
2870 location_t locus = gimple_location (stmt);
2871 basic_block fallthru_bb = NULL;
2873 /* Meh... convert the gimple asm operands into real tree lists.
2874 Eventually we should make all routines work on the vectors instead
2875 of relying on TREE_CHAIN. */
2876 out = NULL_TREE;
2877 n = gimple_asm_noutputs (stmt);
2878 if (n > 0)
2880 t = out = gimple_asm_output_op (stmt, 0);
2881 for (i = 1; i < n; i++)
2882 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
2885 in = NULL_TREE;
2886 n = gimple_asm_ninputs (stmt);
2887 if (n > 0)
2889 t = in = gimple_asm_input_op (stmt, 0);
2890 for (i = 1; i < n; i++)
2891 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
2894 cl = NULL_TREE;
2895 n = gimple_asm_nclobbers (stmt);
2896 if (n > 0)
2898 t = cl = gimple_asm_clobber_op (stmt, 0);
2899 for (i = 1; i < n; i++)
2900 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
2903 labels = NULL_TREE;
2904 n = gimple_asm_nlabels (stmt);
2905 if (n > 0)
2907 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
2908 if (fallthru)
2909 fallthru_bb = fallthru->dest;
2910 t = labels = gimple_asm_label_op (stmt, 0);
2911 for (i = 1; i < n; i++)
2912 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
2915 s = gimple_asm_string (stmt);
2916 str = build_string (strlen (s), s);
2918 if (gimple_asm_input_p (stmt))
2920 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
2921 return;
2924 outputs = out;
2925 noutputs = gimple_asm_noutputs (stmt);
2926 /* o[I] is the place where output number I should be written. */
2927 o = (tree *) alloca (noutputs * sizeof (tree));
2929 /* Record the contents of OUTPUTS before it is modified. */
2930 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2931 o[i] = TREE_VALUE (tail);
2933 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
2934 OUTPUTS some trees for where the values were actually stored. */
2935 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
2936 gimple_asm_volatile_p (stmt), locus);
2938 /* Copy all the intermediate outputs into the specified outputs. */
2939 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2941 if (o[i] != TREE_VALUE (tail))
2943 expand_assignment (o[i], TREE_VALUE (tail), false);
2944 free_temp_slots ();
2946 /* Restore the original value so that it's correct the next
2947 time we expand this function. */
2948 TREE_VALUE (tail) = o[i];
2953 /* Emit code to jump to the address
2954 specified by the pointer expression EXP. */
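/* E.g. this is what handles GNU C's computed goto:
   void *p = &&lab;  goto *p;  */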
2956 static void
2957 expand_computed_goto (tree exp)
2959 rtx x = expand_normal (exp);
2961 x = convert_memory_address (Pmode, x);
2963 do_pending_stack_adjust ();
2964 emit_indirect_jump (x);
2967 /* Generate RTL code for a `goto' statement with target label LABEL.
2968 LABEL should be a LABEL_DECL tree node that was or will later be
2969 defined with `expand_label'. */
2971 static void
2972 expand_goto (tree label)
2974 #ifdef ENABLE_CHECKING
2975 /* Check for a nonlocal goto to a containing function. Should have
2976 gotten translated to __builtin_nonlocal_goto. */
2977 tree context = decl_function_context (label);
2978 gcc_assert (!context || context == current_function_decl);
2979 #endif
2981 emit_jump (label_rtx (label));
2984 /* Output a return with no value. */
2986 static void
2987 expand_null_return_1 (void)
2989 clear_pending_stack_adjust ();
2990 do_pending_stack_adjust ();
2991 emit_jump (return_label);
2994 /* Generate RTL to return from the current function, with no value.
2995 (That is, we do not do anything about returning any value.) */
2997 void
2998 expand_null_return (void)
3000 /* If this function was declared to return a value, but we
3001 didn't, clobber the return registers so that they are not
3002 propagated live to the rest of the function. */
3003 clobber_return_register ();
3005 expand_null_return_1 ();
3008 /* Generate RTL to return from the current function, with value VAL. */
3010 static void
3011 expand_value_return (rtx val)
3013 /* Copy the value to the return location unless it's already there. */
3015 tree decl = DECL_RESULT (current_function_decl);
3016 rtx return_reg = DECL_RTL (decl);
3017 if (return_reg != val)
3019 tree funtype = TREE_TYPE (current_function_decl);
3020 tree type = TREE_TYPE (decl);
3021 int unsignedp = TYPE_UNSIGNED (type);
3022 enum machine_mode old_mode = DECL_MODE (decl);
3023 enum machine_mode mode;
3024 if (DECL_BY_REFERENCE (decl))
3025 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3026 else
3027 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3029 if (mode != old_mode)
3030 val = convert_modes (mode, old_mode, val, unsignedp);
3032 if (GET_CODE (return_reg) == PARALLEL)
3033 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3034 else
3035 emit_move_insn (return_reg, val);
3038 expand_null_return_1 ();
3041 /* Generate RTL to evaluate the expression RETVAL and return it
3042 from the current function. */
3044 static void
3045 expand_return (tree retval)
3047 rtx result_rtl;
3048 rtx val = 0;
3049 tree retval_rhs;
3051 /* If function wants no value, give it none. */
3052 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3054 expand_normal (retval);
3055 expand_null_return ();
3056 return;
3059 if (retval == error_mark_node)
3061 /* Treat this like a return of no value from a function that
3062 returns a value. */
3063 expand_null_return ();
3064 return;
3066 else if ((TREE_CODE (retval) == MODIFY_EXPR
3067 || TREE_CODE (retval) == INIT_EXPR)
3068 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3069 retval_rhs = TREE_OPERAND (retval, 1);
3070 else
3071 retval_rhs = retval;
3073 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3075 /* If we are returning the RESULT_DECL, then the value has already
3076 been stored into it, so we don't have to do anything special. */
3077 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3078 expand_value_return (result_rtl);
3080 /* If the result is an aggregate that is being returned in one (or more)
3081 registers, load the registers here. */
3083 else if (retval_rhs != 0
3084 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3085 && REG_P (result_rtl))
3087 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3088 if (val)
3090 /* Use the mode of the result value on the return register. */
3091 PUT_MODE (result_rtl, GET_MODE (val));
3092 expand_value_return (val);
3094 else
3095 expand_null_return ();
3097 else if (retval_rhs != 0
3098 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3099 && (REG_P (result_rtl)
3100 || (GET_CODE (result_rtl) == PARALLEL)))
3102 /* Calculate the return value into a temporary (usually a pseudo
3103 reg). */
3104 tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3105 tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3107 val = assign_temp (nt, 0, 1);
3108 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3109 val = force_not_mem (val);
3110 /* Return the calculated value. */
3111 expand_value_return (val);
3113 else
3115 /* No hard reg used; calculate value into hard return reg. */
3116 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3117 expand_value_return (result_rtl);
3121 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3122 STMT that doesn't require special handling for outgoing edges. That
3123 is, no tailcalls and no GIMPLE_COND. */
3125 static void
3126 expand_gimple_stmt_1 (gimple stmt)
3128 tree op0;
3130 set_curr_insn_location (gimple_location (stmt));
3132 switch (gimple_code (stmt))
3134 case GIMPLE_GOTO:
3135 op0 = gimple_goto_dest (stmt);
3136 if (TREE_CODE (op0) == LABEL_DECL)
3137 expand_goto (op0);
3138 else
3139 expand_computed_goto (op0);
3140 break;
3141 case GIMPLE_LABEL:
3142 expand_label (gimple_label_label (stmt));
3143 break;
3144 case GIMPLE_NOP:
3145 case GIMPLE_PREDICT:
3146 break;
3147 case GIMPLE_SWITCH:
3148 expand_case (stmt);
3149 break;
3150 case GIMPLE_ASM:
3151 expand_asm_stmt (stmt);
3152 break;
3153 case GIMPLE_CALL:
3154 expand_call_stmt (stmt);
3155 break;
3157 case GIMPLE_RETURN:
3158 op0 = gimple_return_retval (stmt);
3160 if (op0 && op0 != error_mark_node)
3162 tree result = DECL_RESULT (current_function_decl);
3164 /* If we are not returning the current function's RESULT_DECL,
3165 build an assignment to it. */
3166 if (op0 != result)
3168 /* I believe that a function's RESULT_DECL is unique. */
3169 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3171 /* ??? We'd like to use simply expand_assignment here,
3172 but this fails if the value is of BLKmode but the return
3173 decl is a register. expand_return has special handling
3174 for this combination, which eventually should move
3175 to common code. See comments there. Until then, let's
3176 build a modify expression :-/ */
3177 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3178 result, op0);
3181 if (!op0)
3182 expand_null_return ();
3183 else
3184 expand_return (op0);
3185 break;
3187 case GIMPLE_ASSIGN:
3189 tree lhs = gimple_assign_lhs (stmt);
3191 /* Tree expand used to fiddle with |= and &= of two bitfield
3192 COMPONENT_REFs here. This can't happen with gimple; the LHS
3193 of binary assigns must be a gimple reg. */
3195 if (TREE_CODE (lhs) != SSA_NAME
3196 || get_gimple_rhs_class (gimple_expr_code (stmt))
3197 == GIMPLE_SINGLE_RHS)
3199 tree rhs = gimple_assign_rhs1 (stmt);
3200 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3201 == GIMPLE_SINGLE_RHS);
3202 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3203 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3204 if (TREE_CLOBBER_P (rhs))
3205 /* This is a clobber marking that this LHS is going out of
3206 scope. */
3208 else
3209 expand_assignment (lhs, rhs,
3210 gimple_assign_nontemporal_move_p (stmt));
3212 else
3214 rtx target, temp;
3215 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
3216 struct separate_ops ops;
3217 bool promoted = false;
3219 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3220 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3221 promoted = true;
3223 ops.code = gimple_assign_rhs_code (stmt);
3224 ops.type = TREE_TYPE (lhs);
3225 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3227 case GIMPLE_TERNARY_RHS:
3228 ops.op2 = gimple_assign_rhs3 (stmt);
3229 /* Fallthru */
3230 case GIMPLE_BINARY_RHS:
3231 ops.op1 = gimple_assign_rhs2 (stmt);
3232 /* Fallthru */
3233 case GIMPLE_UNARY_RHS:
3234 ops.op0 = gimple_assign_rhs1 (stmt);
3235 break;
3236 default:
3237 gcc_unreachable ();
3239 ops.location = gimple_location (stmt);
3241 /* If we want to use a nontemporal store, force the value into a
3242 register first. If we store into a promoted register,
3243 don't directly expand to target. */
3244 temp = nontemporal || promoted ? NULL_RTX : target;
3245 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3246 EXPAND_NORMAL);
3248 if (temp == target)
3250 else if (promoted)
3252 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
3253 /* If TEMP is a VOIDmode constant, use convert_modes to make
3254 sure that we properly convert it. */
3255 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3257 temp = convert_modes (GET_MODE (target),
3258 TYPE_MODE (ops.type),
3259 temp, unsignedp);
3260 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3261 GET_MODE (target), temp, unsignedp);
3264 convert_move (SUBREG_REG (target), temp, unsignedp);
3266 else if (nontemporal && emit_storent_insn (target, temp))
3268 else
3270 temp = force_operand (temp, target);
3271 if (temp != target)
3272 emit_move_insn (target, temp);
3276 break;
3278 default:
3279 gcc_unreachable ();
3283 /* Expand one gimple statement STMT and return the last RTL instruction
3284 before any of the newly generated ones.
3286 In addition to generating the necessary RTL instructions this also
3287 sets REG_EH_REGION notes if necessary and sets the current source
3288 location for diagnostics. */
3290 static rtx
3291 expand_gimple_stmt (gimple stmt)
3293 location_t saved_location = input_location;
3294 rtx last = get_last_insn ();
3295 int lp_nr;
3297 gcc_assert (cfun);
3299 /* We need to save and restore the current source location so that errors
3300 discovered during expansion are emitted with the right location. But
3301 it would be better if the diagnostic routines used the source location
3302 embedded in the tree nodes rather than globals. */
3303 if (gimple_has_location (stmt))
3304 input_location = gimple_location (stmt);
3306 expand_gimple_stmt_1 (stmt);
3308 /* Free any temporaries used to evaluate this statement. */
3309 free_temp_slots ();
3311 input_location = saved_location;
3313 /* Mark all insns that may trap. */
3314 lp_nr = lookup_stmt_eh_lp (stmt);
3315 if (lp_nr)
3317 rtx insn;
3318 for (insn = next_real_insn (last); insn;
3319 insn = next_real_insn (insn))
3321 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3322 /* If we want exceptions for non-call insns, any
3323 may_trap_p instruction may throw. */
3324 && GET_CODE (PATTERN (insn)) != CLOBBER
3325 && GET_CODE (PATTERN (insn)) != USE
3326 && insn_could_throw_p (insn))
3327 make_reg_eh_region_note (insn, 0, lp_nr);
3331 return last;
3334 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3335 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3336 generated a tail call (something that might be denied by the ABI
3337 rules governing the call; see calls.c).
3339 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3340 can still reach the rest of BB. The case here is __builtin_sqrt,
3341 where the NaN result goes through the external function (with a
3342 tailcall) and the normal result happens via a sqrt instruction. */
3344 static basic_block
3345 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
3347 rtx last2, last;
3348 edge e;
3349 edge_iterator ei;
3350 int probability;
3351 gcov_type count;
3353 last2 = last = expand_gimple_stmt (stmt);
3355 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3356 if (CALL_P (last) && SIBLING_CALL_P (last))
3357 goto found;
3359 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3361 *can_fallthru = true;
3362 return NULL;
3364 found:
3365 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3366 Any instructions emitted here are about to be deleted. */
3367 do_pending_stack_adjust ();
3369 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3370 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3371 EH or abnormal edges, we shouldn't have created a tail call in
3372 the first place. So it seems to me we should just be removing
3373 all edges here, or redirecting the existing fallthru edge to
3374 the exit block. */
3376 probability = 0;
3377 count = 0;
3379 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3381 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3383 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3385 e->dest->count -= e->count;
3386 e->dest->frequency -= EDGE_FREQUENCY (e);
3387 if (e->dest->count < 0)
3388 e->dest->count = 0;
3389 if (e->dest->frequency < 0)
3390 e->dest->frequency = 0;
3392 count += e->count;
3393 probability += e->probability;
3394 remove_edge (e);
3396 else
3397 ei_next (&ei);
3400 /* This is somewhat ugly: the call_expr expander often emits instructions
3401 after the sibcall (to perform the function return). These confuse the
3402 find_many_sub_basic_blocks code, so we need to get rid of them.
3403 last = NEXT_INSN (last);
3404 gcc_assert (BARRIER_P (last));
3406 *can_fallthru = false;
3407 while (NEXT_INSN (last))
3409 /* For instance, an sqrt builtin expander expands an if with a
3410 sibcall in the then-arm and a label for the else-arm. */
3411 if (LABEL_P (NEXT_INSN (last)))
3413 *can_fallthru = true;
3414 break;
3416 delete_insn (NEXT_INSN (last));
3419 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3420 | EDGE_SIBCALL);
3421 e->probability += probability;
3422 e->count += count;
3423 BB_END (bb) = last;
3424 update_bb_for_insn (bb);
3426 if (NEXT_INSN (last))
3428 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3430 last = BB_END (bb);
3431 if (BARRIER_P (last))
3432 BB_END (bb) = PREV_INSN (last);
3435 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3437 return bb;
3440 /* Return the difference between the floor and the truncated result of
3441 a signed division by OP1 with remainder MOD. */
3442 static rtx
3443 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3445 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
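/* Worked example: -7 / 2 truncates to -3 with mod == -1; mod != 0
   and op1 / mod == 2 / -1 < 0, so the adjustment is -1 and the
   floor result is -4.  */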
3446 return gen_rtx_IF_THEN_ELSE
3447 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3448 gen_rtx_IF_THEN_ELSE
3449 (mode, gen_rtx_LT (BImode,
3450 gen_rtx_DIV (mode, op1, mod),
3451 const0_rtx),
3452 constm1_rtx, const0_rtx),
3453 const0_rtx);
3456 /* Return the difference between the ceil and the truncated result of
3457 a signed division by OP1 with remainder MOD. */
3458 static rtx
3459 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3461 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
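/* Worked example: 7 / 2 truncates to 3 with mod == 1; mod != 0 and
   op1 / mod == 2 / 1 > 0, so the adjustment is +1 and the ceiling
   result is 4.  */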
3462 return gen_rtx_IF_THEN_ELSE
3463 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3464 gen_rtx_IF_THEN_ELSE
3465 (mode, gen_rtx_GT (BImode,
3466 gen_rtx_DIV (mode, op1, mod),
3467 const0_rtx),
3468 const1_rtx, const0_rtx),
3469 const0_rtx);
3472 /* Return the difference between the ceil and the truncated result of
3473 an unsigned division by OP1 with remainder MOD. */
3474 static rtx
3475 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3477 /* (mod != 0 ? 1 : 0) */
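/* Worked example: 7u / 2 truncates to 3 with mod == 1 != 0, so the
   adjustment is +1 and the ceiling result is 4.  */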
3478 return gen_rtx_IF_THEN_ELSE
3479 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3480 const1_rtx, const0_rtx);
3483 /* Return the difference between the rounded and the truncated result
3484 of a signed division by OP1 with remainder MOD. Halfway cases are
3485 rounded away from zero, rather than to the nearest even number. */
3486 static rtx
3487 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3489 /* (abs (mod) >= abs (op1) - abs (mod)
3490 ? (op1 / mod > 0 ? 1 : -1)
3491 : 0) */
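/* Worked example: 7 / 2 truncates to 3 with mod == 1; abs (mod) == 1
   >= abs (op1) - abs (mod) == 1, i.e. the remainder is at least half
   the divisor, and op1 / mod > 0, so the adjustment is +1 and the
   rounded result is 4.  */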
3492 return gen_rtx_IF_THEN_ELSE
3493 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3494 gen_rtx_MINUS (mode,
3495 gen_rtx_ABS (mode, op1),
3496 gen_rtx_ABS (mode, mod))),
3497 gen_rtx_IF_THEN_ELSE
3498 (mode, gen_rtx_GT (BImode,
3499 gen_rtx_DIV (mode, op1, mod),
3500 const0_rtx),
3501 const1_rtx, constm1_rtx),
3502 const0_rtx);
3505 /* Return the difference between the rounded and the truncated result
3506 of an unsigned division by OP1 with remainder MOD. Halfway cases
3507 are rounded away from zero, rather than to the nearest even
3508 number. */
3509 static rtx
3510 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3512 /* (mod >= op1 - mod ? 1 : 0) */
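/* Worked example: 7u / 2 truncates to 3 with mod == 1 and
   op1 - mod == 1; mod >= op1 - mod, so the adjustment is +1 and the
   rounded result is 4.  */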
3513 return gen_rtx_IF_THEN_ELSE
3514 (mode, gen_rtx_GE (BImode, mod,
3515 gen_rtx_MINUS (mode, op1, mod)),
3516 const1_rtx, const0_rtx);
3519 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3520 any rtl. */
3522 static rtx
3523 convert_debug_memory_address (enum machine_mode mode, rtx x,
3524 addr_space_t as)
3526 enum machine_mode xmode = GET_MODE (x);
3528 #ifndef POINTERS_EXTEND_UNSIGNED
3529 gcc_assert (mode == Pmode
3530 || mode == targetm.addr_space.address_mode (as));
3531 gcc_assert (xmode == mode || xmode == VOIDmode);
3532 #else
3533 rtx temp;
3535 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3537 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3538 return x;
3540 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3541 x = simplify_gen_subreg (mode, x, xmode,
3542 subreg_lowpart_offset
3543 (mode, xmode));
3544 else if (POINTERS_EXTEND_UNSIGNED > 0)
3545 x = gen_rtx_ZERO_EXTEND (mode, x);
3546 else if (!POINTERS_EXTEND_UNSIGNED)
3547 x = gen_rtx_SIGN_EXTEND (mode, x);
3548 else
3550 switch (GET_CODE (x))
3552 case SUBREG:
3553 if ((SUBREG_PROMOTED_VAR_P (x)
3554 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3555 || (GET_CODE (SUBREG_REG (x)) == PLUS
3556 && REG_P (XEXP (SUBREG_REG (x), 0))
3557 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3558 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3559 && GET_MODE (SUBREG_REG (x)) == mode)
3560 return SUBREG_REG (x);
3561 break;
3562 case LABEL_REF:
3563 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
3564 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3565 return temp;
3566 case SYMBOL_REF:
3567 temp = shallow_copy_rtx (x);
3568 PUT_MODE (temp, mode);
3569 return temp;
3570 case CONST:
3571 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3572 if (temp)
3573 temp = gen_rtx_CONST (mode, temp);
3574 return temp;
3575 case PLUS:
3576 case MINUS:
3577 if (CONST_INT_P (XEXP (x, 1)))
3579 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3580 if (temp)
3581 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3583 break;
3584 default:
3585 break;
3587 /* Don't know how to express ptr_extend as an operation in debug info. */
3588 return NULL;
3590 #endif /* POINTERS_EXTEND_UNSIGNED */
3592 return x;
3595 /* Return an RTX equivalent to the value of the parameter DECL. */
3597 static rtx
3598 expand_debug_parm_decl (tree decl)
3600 rtx incoming = DECL_INCOMING_RTL (decl);
3602 if (incoming
3603 && GET_MODE (incoming) != BLKmode
3604 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3605 || (MEM_P (incoming)
3606 && REG_P (XEXP (incoming, 0))
3607 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3609 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3611 #ifdef HAVE_window_save
3612 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3613 If the target machine has an explicit window save instruction, the
3614 actual entry value is the corresponding OUTGOING_REGNO instead. */
3615 if (REG_P (incoming)
3616 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3617 incoming
3618 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3619 OUTGOING_REGNO (REGNO (incoming)), 0);
3620 else if (MEM_P (incoming))
3622 rtx reg = XEXP (incoming, 0);
3623 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3625 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3626 incoming = replace_equiv_address_nv (incoming, reg);
3628 else
3629 incoming = copy_rtx (incoming);
3631 #endif
3633 ENTRY_VALUE_EXP (rtl) = incoming;
3634 return rtl;
3637 if (incoming
3638 && GET_MODE (incoming) != BLKmode
3639 && !TREE_ADDRESSABLE (decl)
3640 && MEM_P (incoming)
3641 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3642 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3643 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3644 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3645 return copy_rtx (incoming);
3647 return NULL_RTX;
3650 /* Return an RTX equivalent to the value of the tree expression EXP. */
3652 static rtx
3653 expand_debug_expr (tree exp)
3655 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3656 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3657 enum machine_mode inner_mode = VOIDmode;
3658 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3659 addr_space_t as;
3661 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3663 case tcc_expression:
3664 switch (TREE_CODE (exp))
3666 case COND_EXPR:
3667 case DOT_PROD_EXPR:
3668 case WIDEN_MULT_PLUS_EXPR:
3669 case WIDEN_MULT_MINUS_EXPR:
3670 case FMA_EXPR:
3671 goto ternary;
3673 case TRUTH_ANDIF_EXPR:
3674 case TRUTH_ORIF_EXPR:
3675 case TRUTH_AND_EXPR:
3676 case TRUTH_OR_EXPR:
3677 case TRUTH_XOR_EXPR:
3678 goto binary;
3680 case TRUTH_NOT_EXPR:
3681 goto unary;
3683 default:
3684 break;
3686 break;
3688 ternary:
3689 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3690 if (!op2)
3691 return NULL_RTX;
3692 /* Fall through. */
3694 binary:
3695 case tcc_binary:
3696 case tcc_comparison:
3697 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3698 if (!op1)
3699 return NULL_RTX;
3700 /* Fall through. */
3702 unary:
3703 case tcc_unary:
3704 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3705 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3706 if (!op0)
3707 return NULL_RTX;
3708 break;
3710 case tcc_type:
3711 case tcc_statement:
3712 gcc_unreachable ();
3714 case tcc_constant:
3715 case tcc_exceptional:
3716 case tcc_declaration:
3717 case tcc_reference:
3718 case tcc_vl_exp:
3719 break;
3722 switch (TREE_CODE (exp))
3724 case STRING_CST:
3725 if (!lookup_constant_def (exp))
3727 if (strlen (TREE_STRING_POINTER (exp)) + 1
3728 != (size_t) TREE_STRING_LENGTH (exp))
3729 return NULL_RTX;
3730 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3731 op0 = gen_rtx_MEM (BLKmode, op0);
3732 set_mem_attributes (op0, exp, 0);
3733 return op0;
3735 /* Fall through... */
3737 case INTEGER_CST:
3738 case REAL_CST:
3739 case FIXED_CST:
3740 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3741 return op0;
3743 case COMPLEX_CST:
3744 gcc_assert (COMPLEX_MODE_P (mode));
3745 op0 = expand_debug_expr (TREE_REALPART (exp));
3746 op1 = expand_debug_expr (TREE_IMAGPART (exp));
3747 return gen_rtx_CONCAT (mode, op0, op1);
3749 case DEBUG_EXPR_DECL:
3750 op0 = DECL_RTL_IF_SET (exp);
3752 if (op0)
3753 return op0;
3755 op0 = gen_rtx_DEBUG_EXPR (mode);
3756 DEBUG_EXPR_TREE_DECL (op0) = exp;
3757 SET_DECL_RTL (exp, op0);
3759 return op0;
3761 case VAR_DECL:
3762 case PARM_DECL:
3763 case FUNCTION_DECL:
3764 case LABEL_DECL:
3765 case CONST_DECL:
3766 case RESULT_DECL:
3767 op0 = DECL_RTL_IF_SET (exp);
3769 /* This decl was probably optimized away. */
3770 if (!op0)
3772 if (TREE_CODE (exp) != VAR_DECL
3773 || DECL_EXTERNAL (exp)
3774 || !TREE_STATIC (exp)
3775 || !DECL_NAME (exp)
3776 || DECL_HARD_REGISTER (exp)
3777 || DECL_IN_CONSTANT_POOL (exp)
3778 || mode == VOIDmode)
3779 return NULL;
3781 op0 = make_decl_rtl_for_debug (exp);
3782 if (!MEM_P (op0)
3783 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
3784 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
3785 return NULL;
3787 else
3788 op0 = copy_rtx (op0);
3790 if (GET_MODE (op0) == BLKmode
3791 /* If op0 is not BLKmode, but MODE is BLKmode, adjust_mode
3792 below would ICE. While it is likely a FE bug,
3793 try to be robust here. See PR43166. */
3794 || mode == BLKmode
3795 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
3797 gcc_assert (MEM_P (op0));
3798 op0 = adjust_address_nv (op0, mode, 0);
3799 return op0;
3802 /* Fall through. */
3804 adjust_mode:
3805 case PAREN_EXPR:
3806 case NOP_EXPR:
3807 case CONVERT_EXPR:
3809 inner_mode = GET_MODE (op0);
3811 if (mode == inner_mode)
3812 return op0;
3814 if (inner_mode == VOIDmode)
3816 if (TREE_CODE (exp) == SSA_NAME)
3817 inner_mode = TYPE_MODE (TREE_TYPE (exp));
3818 else
3819 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3820 if (mode == inner_mode)
3821 return op0;
3824 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3826 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3827 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3828 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3829 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3830 else
3831 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3833 else if (FLOAT_MODE_P (mode))
3835 gcc_assert (TREE_CODE (exp) != SSA_NAME);
3836 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3837 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
3838 else
3839 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
3841 else if (FLOAT_MODE_P (inner_mode))
3843 if (unsignedp)
3844 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3845 else
3846 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3848 else if (CONSTANT_P (op0)
3849 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
3850 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3851 subreg_lowpart_offset (mode,
3852 inner_mode));
3853 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
3854 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
3855 : unsignedp)
3856 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3857 else
3858 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3860 return op0;
3863 case MEM_REF:
3864 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3866 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
3867 TREE_OPERAND (exp, 0),
3868 TREE_OPERAND (exp, 1));
3869 if (newexp)
3870 return expand_debug_expr (newexp);
3872 /* FALLTHROUGH */
3873 case INDIRECT_REF:
3874 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3875 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3876 if (!op0)
3877 return NULL;
3879 if (TREE_CODE (exp) == MEM_REF)
3881 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3882 || (GET_CODE (op0) == PLUS
3883 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
3884 /* (mem (debug_implicit_ptr)) might confuse aliasing.
3885 Instead just use get_inner_reference. */
3886 goto component_ref;
3888 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3889 if (!op1 || !CONST_INT_P (op1))
3890 return NULL;
3892 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
3895 if (POINTER_TYPE_P (TREE_TYPE (exp)))
3896 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
3897 else
3898 as = ADDR_SPACE_GENERIC;
3900 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3901 op0, as);
3902 if (op0 == NULL_RTX)
3903 return NULL;
3905 op0 = gen_rtx_MEM (mode, op0);
3906 set_mem_attributes (op0, exp, 0);
3907 if (TREE_CODE (exp) == MEM_REF
3908 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3909 set_mem_expr (op0, NULL_TREE);
3910 set_mem_addr_space (op0, as);
3912 return op0;
3914 case TARGET_MEM_REF:
3915 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
3916 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
3917 return NULL;
3919 op0 = expand_debug_expr
3920 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
3921 if (!op0)
3922 return NULL;
3924 if (POINTER_TYPE_P (TREE_TYPE (exp)))
3925 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
3926 else
3927 as = ADDR_SPACE_GENERIC;
3929 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3930 op0, as);
3931 if (op0 == NULL_RTX)
3932 return NULL;
3934 op0 = gen_rtx_MEM (mode, op0);
3936 set_mem_attributes (op0, exp, 0);
3937 set_mem_addr_space (op0, as);
3939 return op0;
3941 component_ref:
3942 case ARRAY_REF:
3943 case ARRAY_RANGE_REF:
3944 case COMPONENT_REF:
3945 case BIT_FIELD_REF:
3946 case REALPART_EXPR:
3947 case IMAGPART_EXPR:
3948 case VIEW_CONVERT_EXPR:
3950 enum machine_mode mode1;
3951 HOST_WIDE_INT bitsize, bitpos;
3952 tree offset;
3953 int volatilep = 0;
3954 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3955 &mode1, &unsignedp, &volatilep, false);
3956 rtx orig_op0;
3958 if (bitsize == 0)
3959 return NULL;
3961 orig_op0 = op0 = expand_debug_expr (tem);
3963 if (!op0)
3964 return NULL;
3966 if (offset)
3968 enum machine_mode addrmode, offmode;
3970 if (!MEM_P (op0))
3971 return NULL;
3973 op0 = XEXP (op0, 0);
3974 addrmode = GET_MODE (op0);
3975 if (addrmode == VOIDmode)
3976 addrmode = Pmode;
3978 op1 = expand_debug_expr (offset);
3979 if (!op1)
3980 return NULL;
3982 offmode = GET_MODE (op1);
3983 if (offmode == VOIDmode)
3984 offmode = TYPE_MODE (TREE_TYPE (offset));
3986 if (addrmode != offmode)
3987 op1 = simplify_gen_subreg (addrmode, op1, offmode,
3988 subreg_lowpart_offset (addrmode,
3989 offmode));
3991 /* Don't use offset_address here; we don't need a
3992 recognizable address, and we don't want to generate
3993 code. */
3994 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
3995 op0, op1));
3998 if (MEM_P (op0))
4000 if (mode1 == VOIDmode)
4001 /* Bitfield. */
4002 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4003 if (bitpos >= BITS_PER_UNIT)
4005 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4006 bitpos %= BITS_PER_UNIT;
4008 else if (bitpos < 0)
4010 HOST_WIDE_INT units
4011 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4012 op0 = adjust_address_nv (op0, mode1, units);
4013 bitpos += units * BITS_PER_UNIT;
4015 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4016 op0 = adjust_address_nv (op0, mode, 0);
4017 else if (GET_MODE (op0) != mode1)
4018 op0 = adjust_address_nv (op0, mode1, 0);
4019 else
4020 op0 = copy_rtx (op0);
4021 if (op0 == orig_op0)
4022 op0 = shallow_copy_rtx (op0);
4023 set_mem_attributes (op0, exp, 0);
4026 if (bitpos == 0 && mode == GET_MODE (op0))
4027 return op0;
4029 if (bitpos < 0)
4030 return NULL;
4032 if (GET_MODE (op0) == BLKmode)
4033 return NULL;
4035 if ((bitpos % BITS_PER_UNIT) == 0
4036 && bitsize == GET_MODE_BITSIZE (mode1))
4038 enum machine_mode opmode = GET_MODE (op0);
4040 if (opmode == VOIDmode)
4041 opmode = TYPE_MODE (TREE_TYPE (tem));
4043 /* This condition may hold if we're expanding the address
4044 right past the end of an array that turned out not to
4045 be addressable (i.e., the address was only computed in
4046 debug stmts). The gen_subreg below would rightfully
4047 crash, and the address doesn't really exist, so just
4048 drop it. */
4049 if (bitpos >= GET_MODE_BITSIZE (opmode))
4050 return NULL;
4052 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4053 return simplify_gen_subreg (mode, op0, opmode,
4054 bitpos / BITS_PER_UNIT);
4057 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4058 && TYPE_UNSIGNED (TREE_TYPE (exp))
4059 ? SIGN_EXTRACT
4060 : ZERO_EXTRACT, mode,
4061 GET_MODE (op0) != VOIDmode
4062 ? GET_MODE (op0)
4063 : TYPE_MODE (TREE_TYPE (tem)),
4064 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4067 case ABS_EXPR:
4068 return simplify_gen_unary (ABS, mode, op0, mode);
4070 case NEGATE_EXPR:
4071 return simplify_gen_unary (NEG, mode, op0, mode);
4073 case BIT_NOT_EXPR:
4074 return simplify_gen_unary (NOT, mode, op0, mode);
4076 case FLOAT_EXPR:
4077 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4078 0)))
4079 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4080 inner_mode);
4082 case FIX_TRUNC_EXPR:
4083 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4084 inner_mode);
4086 case POINTER_PLUS_EXPR:
4087 /* For the rare target where pointers are not the same size as
4088 size_t, we need to check for mis-matched modes and correct
4089 the addend. */
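/* E.g., on a hypothetical target with 32-bit (SImode) pointers and a
   16-bit sizetype, the HImode addend OP1 is sign-extended to SImode
   before the PLUS below is formed.  */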
4090 if (op0 && op1
4091 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4092 && GET_MODE (op0) != GET_MODE (op1))
4094 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4095 /* If OP0 is a partial mode, then we must truncate, even if it has
4096 the same bitsize as OP1, as GCC's representation of partial modes
4097 is opaque. */
4098 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4099 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4100 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4101 GET_MODE (op1));
4102 else
4103 /* We always sign-extend, regardless of the signedness of
4104 the operand, because the operand is always unsigned
4105 here even if the original C expression is signed. */
4106 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4107 GET_MODE (op1));
4109 /* Fall through. */
4110 case PLUS_EXPR:
4111 return simplify_gen_binary (PLUS, mode, op0, op1);
4113 case MINUS_EXPR:
4114 return simplify_gen_binary (MINUS, mode, op0, op1);
4116 case MULT_EXPR:
4117 return simplify_gen_binary (MULT, mode, op0, op1);
4119 case RDIV_EXPR:
4120 case TRUNC_DIV_EXPR:
4121 case EXACT_DIV_EXPR:
4122 if (unsignedp)
4123 return simplify_gen_binary (UDIV, mode, op0, op1);
4124 else
4125 return simplify_gen_binary (DIV, mode, op0, op1);
4127 case TRUNC_MOD_EXPR:
4128 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
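/* A worked example for the signed floor division below: for -7 / 2,
   DIV truncates to -3 with MOD -1; floor_sdiv_adjust returns -1
   because the remainder is nonzero and the operands differ in sign,
   so the result is -3 + -1 = -4, which is the floor of -3.5.  */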
4130 case FLOOR_DIV_EXPR:
4131 if (unsignedp)
4132 return simplify_gen_binary (UDIV, mode, op0, op1);
4133 else
4135 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4136 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4137 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4138 return simplify_gen_binary (PLUS, mode, div, adj);
4141 case FLOOR_MOD_EXPR:
4142 if (unsignedp)
4143 return simplify_gen_binary (UMOD, mode, op0, op1);
4144 else
4146 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4147 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4148 adj = simplify_gen_unary (NEG, mode,
4149 simplify_gen_binary (MULT, mode, adj, op1),
4150 mode);
4151 return simplify_gen_binary (PLUS, mode, mod, adj);
4154 case CEIL_DIV_EXPR:
4155 if (unsignedp)
4157 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4158 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4159 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4160 return simplify_gen_binary (PLUS, mode, div, adj);
4162 else
4164 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4165 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4166 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4167 return simplify_gen_binary (PLUS, mode, div, adj);
4170 case CEIL_MOD_EXPR:
4171 if (unsignedp)
4173 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4174 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4175 adj = simplify_gen_unary (NEG, mode,
4176 simplify_gen_binary (MULT, mode, adj, op1),
4177 mode);
4178 return simplify_gen_binary (PLUS, mode, mod, adj);
4180 else
4182 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4183 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4184 adj = simplify_gen_unary (NEG, mode,
4185 simplify_gen_binary (MULT, mode, adj, op1),
4186 mode);
4187 return simplify_gen_binary (PLUS, mode, mod, adj);
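/* Likewise a worked example for the round divisions below: for 7 / 2,
   DIV truncates to 3 with MOD 1; round_sdiv_adjust returns 1 since
   2*|MOD| >= |OP1|, so the result is 3 + 1 = 4, i.e. rounding to
   nearest with halfway cases away from zero.  */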
4190 case ROUND_DIV_EXPR:
4191 if (unsignedp)
4193 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4194 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4195 rtx adj = round_udiv_adjust (mode, mod, op1);
4196 return simplify_gen_binary (PLUS, mode, div, adj);
4198 else
4200 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4201 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4202 rtx adj = round_sdiv_adjust (mode, mod, op1);
4203 return simplify_gen_binary (PLUS, mode, div, adj);
4206 case ROUND_MOD_EXPR:
4207 if (unsignedp)
4209 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4210 rtx adj = round_udiv_adjust (mode, mod, op1);
4211 adj = simplify_gen_unary (NEG, mode,
4212 simplify_gen_binary (MULT, mode, adj, op1),
4213 mode);
4214 return simplify_gen_binary (PLUS, mode, mod, adj);
4216 else
4218 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4219 rtx adj = round_sdiv_adjust (mode, mod, op1);
4220 adj = simplify_gen_unary (NEG, mode,
4221 simplify_gen_binary (MULT, mode, adj, op1),
4222 mode);
4223 return simplify_gen_binary (PLUS, mode, mod, adj);
4226 case LSHIFT_EXPR:
4227 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4229 case RSHIFT_EXPR:
4230 if (unsignedp)
4231 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4232 else
4233 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4235 case LROTATE_EXPR:
4236 return simplify_gen_binary (ROTATE, mode, op0, op1);
4238 case RROTATE_EXPR:
4239 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4241 case MIN_EXPR:
4242 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4244 case MAX_EXPR:
4245 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4247 case BIT_AND_EXPR:
4248 case TRUTH_AND_EXPR:
4249 return simplify_gen_binary (AND, mode, op0, op1);
4251 case BIT_IOR_EXPR:
4252 case TRUTH_OR_EXPR:
4253 return simplify_gen_binary (IOR, mode, op0, op1);
4255 case BIT_XOR_EXPR:
4256 case TRUTH_XOR_EXPR:
4257 return simplify_gen_binary (XOR, mode, op0, op1);
4259 case TRUTH_ANDIF_EXPR:
4260 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4262 case TRUTH_ORIF_EXPR:
4263 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4265 case TRUTH_NOT_EXPR:
4266 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4268 case LT_EXPR:
4269 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4270 op0, op1);
4272 case LE_EXPR:
4273 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4274 op0, op1);
4276 case GT_EXPR:
4277 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4278 op0, op1);
4280 case GE_EXPR:
4281 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4282 op0, op1);
4284 case EQ_EXPR:
4285 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4287 case NE_EXPR:
4288 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4290 case UNORDERED_EXPR:
4291 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4293 case ORDERED_EXPR:
4294 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4296 case UNLT_EXPR:
4297 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4299 case UNLE_EXPR:
4300 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4302 case UNGT_EXPR:
4303 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4305 case UNGE_EXPR:
4306 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4308 case UNEQ_EXPR:
4309 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4311 case LTGT_EXPR:
4312 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4314 case COND_EXPR:
4315 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4317 case COMPLEX_EXPR:
4318 gcc_assert (COMPLEX_MODE_P (mode));
4319 if (GET_MODE (op0) == VOIDmode)
4320 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4321 if (GET_MODE (op1) == VOIDmode)
4322 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4323 return gen_rtx_CONCAT (mode, op0, op1);
4325 case CONJ_EXPR:
4326 if (GET_CODE (op0) == CONCAT)
4327 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4328 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4329 XEXP (op0, 1),
4330 GET_MODE_INNER (mode)));
4331 else
4333 enum machine_mode imode = GET_MODE_INNER (mode);
4334 rtx re, im;
4336 if (MEM_P (op0))
4338 re = adjust_address_nv (op0, imode, 0);
4339 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4341 else
4343 enum machine_mode ifmode = int_mode_for_mode (mode);
4344 enum machine_mode ihmode = int_mode_for_mode (imode);
4345 rtx halfsize;
4346 if (ifmode == BLKmode || ihmode == BLKmode)
4347 return NULL;
4348 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4349 re = op0;
4350 if (mode != ifmode)
4351 re = gen_rtx_SUBREG (ifmode, re, 0);
4352 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4353 if (imode != ihmode)
4354 re = gen_rtx_SUBREG (imode, re, 0);
4355 im = copy_rtx (op0);
4356 if (mode != ifmode)
4357 im = gen_rtx_SUBREG (ifmode, im, 0);
4358 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4359 if (imode != ihmode)
4360 im = gen_rtx_SUBREG (imode, im, 0);
4362 im = gen_rtx_NEG (imode, im);
4363 return gen_rtx_CONCAT (mode, re, im);
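/* Sketch of the non-CONCAT, non-MEM path above: e.g. an SCmode value
   is viewed through a DImode integer, the two SImode halves are pulled
   out with ZERO_EXTRACT, and the imaginary half is wrapped in a NEG.  */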
4366 case ADDR_EXPR:
4367 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4368 if (!op0 || !MEM_P (op0))
4370 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4371 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4372 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4373 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4374 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4375 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4377 if (handled_component_p (TREE_OPERAND (exp, 0)))
4379 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4380 tree decl
4381 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4382 &bitoffset, &bitsize, &maxsize);
4383 if ((TREE_CODE (decl) == VAR_DECL
4384 || TREE_CODE (decl) == PARM_DECL
4385 || TREE_CODE (decl) == RESULT_DECL)
4386 && (!TREE_ADDRESSABLE (decl)
4387 || target_for_debug_bind (decl))
4388 && (bitoffset % BITS_PER_UNIT) == 0
4389 && bitsize > 0
4390 && bitsize == maxsize)
4392 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4393 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4397 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4398 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4399 == ADDR_EXPR)
4401 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4402 0));
4403 if (op0 != NULL
4404 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4405 || (GET_CODE (op0) == PLUS
4406 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4407 && CONST_INT_P (XEXP (op0, 1)))))
4409 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4410 1));
4411 if (!op1 || !CONST_INT_P (op1))
4412 return NULL;
4414 return plus_constant (mode, op0, INTVAL (op1));
4418 return NULL;
4421 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
4422 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4424 return op0;
4426 case VECTOR_CST:
4428 unsigned i;
4430 op0 = gen_rtx_CONCATN
4431 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4433 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4435 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4436 if (!op1)
4437 return NULL;
4438 XVECEXP (op0, 0, i) = op1;
4441 return op0;
4444 case CONSTRUCTOR:
4445 if (TREE_CLOBBER_P (exp))
4446 return NULL;
4447 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4449 unsigned i;
4450 tree val;
4452 op0 = gen_rtx_CONCATN
4453 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4455 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4457 op1 = expand_debug_expr (val);
4458 if (!op1)
4459 return NULL;
4460 XVECEXP (op0, 0, i) = op1;
4463 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4465 op1 = expand_debug_expr
4466 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4468 if (!op1)
4469 return NULL;
4471 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4472 XVECEXP (op0, 0, i) = op1;
4475 return op0;
4477 else
4478 goto flag_unsupported;
4480 case CALL_EXPR:
4481 /* ??? Maybe handle some builtins? */
4482 return NULL;
4484 case SSA_NAME:
4486 gimple g = get_gimple_for_ssa_name (exp);
4487 if (g)
4489 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
4490 if (!op0)
4491 return NULL;
4493 else
4495 int part = var_to_partition (SA.map, exp);
4497 if (part == NO_PARTITION)
4499 /* If this is a reference to the incoming value of a parameter
4500 that is never used in the code, or whose incoming value is
4501 never used in the code, use the PARM_DECL's
4502 DECL_RTL if set. */
4503 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4504 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4506 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4507 if (op0)
4508 goto adjust_mode;
4509 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4510 if (op0)
4511 goto adjust_mode;
4513 return NULL;
4516 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4518 op0 = copy_rtx (SA.partition_to_pseudo[part]);
4520 goto adjust_mode;
4523 case ERROR_MARK:
4524 return NULL;
4526 /* Vector stuff. For most of these tree codes we have no corresponding rtl codes. */
4527 case REALIGN_LOAD_EXPR:
4528 case REDUC_MAX_EXPR:
4529 case REDUC_MIN_EXPR:
4530 case REDUC_PLUS_EXPR:
4531 case VEC_COND_EXPR:
4532 case VEC_LSHIFT_EXPR:
4533 case VEC_PACK_FIX_TRUNC_EXPR:
4534 case VEC_PACK_SAT_EXPR:
4535 case VEC_PACK_TRUNC_EXPR:
4536 case VEC_RSHIFT_EXPR:
4537 case VEC_UNPACK_FLOAT_HI_EXPR:
4538 case VEC_UNPACK_FLOAT_LO_EXPR:
4539 case VEC_UNPACK_HI_EXPR:
4540 case VEC_UNPACK_LO_EXPR:
4541 case VEC_WIDEN_MULT_HI_EXPR:
4542 case VEC_WIDEN_MULT_LO_EXPR:
4543 case VEC_WIDEN_MULT_EVEN_EXPR:
4544 case VEC_WIDEN_MULT_ODD_EXPR:
4545 case VEC_WIDEN_LSHIFT_HI_EXPR:
4546 case VEC_WIDEN_LSHIFT_LO_EXPR:
4547 case VEC_PERM_EXPR:
4548 return NULL;
4550 /* Misc codes. */
4551 case ADDR_SPACE_CONVERT_EXPR:
4552 case FIXED_CONVERT_EXPR:
4553 case OBJ_TYPE_REF:
4554 case WITH_SIZE_EXPR:
4555 return NULL;
4557 case DOT_PROD_EXPR:
4558 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4559 && SCALAR_INT_MODE_P (mode))
4561 op0
4562 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4563 0)))
4564 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4565 inner_mode);
4566 op1
4567 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4568 1)))
4569 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4570 inner_mode);
4571 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4572 return simplify_gen_binary (PLUS, mode, op0, op2);
4574 return NULL;
4576 case WIDEN_MULT_EXPR:
4577 case WIDEN_MULT_PLUS_EXPR:
4578 case WIDEN_MULT_MINUS_EXPR:
4579 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4580 && SCALAR_INT_MODE_P (mode))
4582 inner_mode = GET_MODE (op0);
4583 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4584 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4585 else
4586 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4587 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4588 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4589 else
4590 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4591 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4592 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4593 return op0;
4594 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4595 return simplify_gen_binary (PLUS, mode, op0, op2);
4596 else
4597 return simplify_gen_binary (MINUS, mode, op2, op0);
4599 return NULL;
4601 case MULT_HIGHPART_EXPR:
4602 /* ??? Similar to the above. */
4603 return NULL;
4605 case WIDEN_SUM_EXPR:
4606 case WIDEN_LSHIFT_EXPR:
4607 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4608 && SCALAR_INT_MODE_P (mode))
4610 op0
4611 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4612 0)))
4613 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4614 inner_mode);
4615 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4616 ? ASHIFT : PLUS, mode, op0, op1);
4618 return NULL;
4620 case FMA_EXPR:
4621 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4623 default:
4624 flag_unsupported:
4625 #ifdef ENABLE_CHECKING
4626 debug_tree (exp);
4627 gcc_unreachable ();
4628 #else
4629 return NULL;
4630 #endif
4634 /* Return an RTX equivalent to the source bind value of the tree expression
4635 EXP. */
4637 static rtx
4638 expand_debug_source_expr (tree exp)
4640 rtx op0 = NULL_RTX;
4641 enum machine_mode mode = VOIDmode, inner_mode;
4643 switch (TREE_CODE (exp))
4645 case PARM_DECL:
4647 mode = DECL_MODE (exp);
4648 op0 = expand_debug_parm_decl (exp);
4649 if (op0)
4650 break;
4651 /* See if this isn't an argument that has been completely
4652 optimized out. */
4653 if (!DECL_RTL_SET_P (exp)
4654 && !DECL_INCOMING_RTL (exp)
4655 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4657 tree aexp = DECL_ORIGIN (exp);
4658 if (DECL_CONTEXT (aexp)
4659 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4661 vec<tree, va_gc> **debug_args;
4662 unsigned int ix;
4663 tree ddecl;
4664 debug_args = decl_debug_args_lookup (current_function_decl);
4665 if (debug_args != NULL)
4667 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
4668 ix += 2)
4669 if (ddecl == aexp)
4670 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4674 break;
4676 default:
4677 break;
4680 if (op0 == NULL_RTX)
4681 return NULL_RTX;
4683 inner_mode = GET_MODE (op0);
4684 if (mode == inner_mode)
4685 return op0;
4687 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4689 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4690 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4691 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4692 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4693 else
4694 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4696 else if (FLOAT_MODE_P (mode))
4697 gcc_unreachable ();
4698 else if (FLOAT_MODE_P (inner_mode))
4700 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4701 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4702 else
4703 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4705 else if (CONSTANT_P (op0)
4706 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4707 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4708 subreg_lowpart_offset (mode, inner_mode));
4709 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4710 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4711 else
4712 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4714 return op0;
4717 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
4718 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4719 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
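/* For instance, a location still compound at depth 4, such as the
   innermost sum in a chain like
     (plus (mult (plus (mult (plus A B) C) D) E) F)
   is bound to a fresh DEBUG_EXPR by a debug bind insn emitted before
   INSN, and the deep subexpression is replaced by that DEBUG_EXPR.  */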
4721 static void
4722 avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
4724 rtx exp = *exp_p;
4726 if (exp == NULL_RTX)
4727 return;
4729 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4730 return;
4732 if (depth == 4)
4734 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4735 rtx dval = make_debug_expr_from_rtl (exp);
4737 /* Emit a debug bind insn before INSN. */
4738 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4739 DEBUG_EXPR_TREE_DECL (dval), exp,
4740 VAR_INIT_STATUS_INITIALIZED);
4742 emit_debug_insn_before (bind, insn);
4743 *exp_p = dval;
4744 return;
4747 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
4748 int i, j;
4749 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
4750 switch (*format_ptr++)
4752 case 'e':
4753 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
4754 break;
4756 case 'E':
4757 case 'V':
4758 for (j = 0; j < XVECLEN (exp, i); j++)
4759 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
4760 break;
4762 default:
4763 break;
4767 /* Expand the _LOCs in debug insns. We run this after expanding all
4768 regular insns, so that any variables referenced in the function
4769 will have their DECL_RTLs set. */
4771 static void
4772 expand_debug_locations (void)
4774 rtx insn;
4775 rtx last = get_last_insn ();
4776 int save_strict_alias = flag_strict_aliasing;
4778 /* New alias sets while setting up memory attributes cause
4779 -fcompare-debug failures, even though they don't bring about any
4780 codegen changes. */
4781 flag_strict_aliasing = 0;
4783 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4784 if (DEBUG_INSN_P (insn))
4786 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
4787 rtx val, prev_insn, insn2;
4788 enum machine_mode mode;
4790 if (value == NULL_TREE)
4791 val = NULL_RTX;
4792 else
4794 if (INSN_VAR_LOCATION_STATUS (insn)
4795 == VAR_INIT_STATUS_UNINITIALIZED)
4796 val = expand_debug_source_expr (value);
4797 else
4798 val = expand_debug_expr (value);
4799 gcc_assert (last == get_last_insn ());
4802 if (!val)
4803 val = gen_rtx_UNKNOWN_VAR_LOC ();
4804 else
4806 mode = GET_MODE (INSN_VAR_LOCATION (insn));
4808 gcc_assert (mode == GET_MODE (val)
4809 || (GET_MODE (val) == VOIDmode
4810 && (CONST_SCALAR_INT_P (val)
4811 || GET_CODE (val) == CONST_FIXED
4812 || GET_CODE (val) == LABEL_REF)));
4815 INSN_VAR_LOCATION_LOC (insn) = val;
4816 prev_insn = PREV_INSN (insn);
4817 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
4818 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
4821 flag_strict_aliasing = save_strict_alias;
4824 /* Expand basic block BB from GIMPLE trees to RTL. */
4826 static basic_block
4827 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
4829 gimple_stmt_iterator gsi;
4830 gimple_seq stmts;
4831 gimple stmt = NULL;
4832 rtx note, last;
4833 edge e;
4834 edge_iterator ei;
4835 void **elt;
4837 if (dump_file)
4838 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
4839 bb->index);
4841 /* Note that since we are now transitioning from GIMPLE to RTL, we
4842 cannot use the gsi_*_bb() routines because they expect the basic
4843 block to be in GIMPLE, instead of RTL. Therefore, we need to
4844 access the BB sequence directly. */
4845 stmts = bb_seq (bb);
4846 bb->il.gimple.seq = NULL;
4847 bb->il.gimple.phi_nodes = NULL;
4848 rtl_profile_for_bb (bb);
4849 init_rtl_bb_info (bb);
4850 bb->flags |= BB_RTL;
4852 /* Remove the RETURN_EXPR if we may fall through to the exit
4853 instead. */
4854 gsi = gsi_last (stmts);
4855 if (!gsi_end_p (gsi)
4856 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
4858 gimple ret_stmt = gsi_stmt (gsi);
4860 gcc_assert (single_succ_p (bb));
4861 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
4863 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
4864 && !gimple_return_retval (ret_stmt))
4866 gsi_remove (&gsi, false);
4867 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
4871 gsi = gsi_start (stmts);
4872 if (!gsi_end_p (gsi))
4874 stmt = gsi_stmt (gsi);
4875 if (gimple_code (stmt) != GIMPLE_LABEL)
4876 stmt = NULL;
4879 elt = pointer_map_contains (lab_rtx_for_bb, bb);
4881 if (stmt || elt)
4883 last = get_last_insn ();
4885 if (stmt)
4887 expand_gimple_stmt (stmt);
4888 gsi_next (&gsi);
4891 if (elt)
4892 emit_label ((rtx) *elt);
4894 /* Java emits line number notes at the top of labels.
4895 ??? Make this go away once line number notes are obsoleted. */
4896 BB_HEAD (bb) = NEXT_INSN (last);
4897 if (NOTE_P (BB_HEAD (bb)))
4898 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
4899 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
4901 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4903 else
4904 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
4906 NOTE_BASIC_BLOCK (note) = bb;
4908 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4910 basic_block new_bb;
4912 stmt = gsi_stmt (gsi);
4914 /* If this statement is a non-debug one, and we generate debug
4915 insns, then this one might be the last real use of a TERed
4916 SSA_NAME while there are still some debug uses further
4917 down. Expanding the current SSA name in such further debug
4918 uses by their RHS might lead to wrong debug info, as coalescing
4919 might make the operands of such RHS be placed into the same
4920 pseudo as something else. Like so:
4921 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
4922 use(a_1);
4923 a_2 = ...
4924 #DEBUG ... => a_1
4925 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
4926 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
4927 the write to a_2 would actually have clobbered the place which
4928 formerly held a_0.
4930 So, instead of that, we recognize the situation, and generate
4931 debug temporaries at the last real use of TERed SSA names:
4932 a_1 = a_0 + 1;
4933 #DEBUG #D1 => a_1
4934 use(a_1);
4935 a_2 = ...
4936 #DEBUG ... => #D1
4938 if (MAY_HAVE_DEBUG_INSNS
4939 && SA.values
4940 && !is_gimple_debug (stmt))
4942 ssa_op_iter iter;
4943 tree op;
4944 gimple def;
4946 location_t sloc = curr_insn_location ();
4948 /* Look for SSA names that have their last use here (TERed
4949 names always have only one real use). */
4950 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4951 if ((def = get_gimple_for_ssa_name (op)))
4953 imm_use_iterator imm_iter;
4954 use_operand_p use_p;
4955 bool have_debug_uses = false;
4957 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
4959 if (gimple_debug_bind_p (USE_STMT (use_p)))
4961 have_debug_uses = true;
4962 break;
4966 if (have_debug_uses)
4968 /* OP is a TERed SSA name, with DEF its defining
4969 statement, and where OP is used in further debug
4970 instructions. Generate a debug temporary, and
4971 replace all uses of OP in debug insns with that
4972 temporary. */
4973 gimple debugstmt;
4974 tree value = gimple_assign_rhs_to_tree (def);
4975 tree vexpr = make_node (DEBUG_EXPR_DECL);
4976 rtx val;
4977 enum machine_mode mode;
4979 set_curr_insn_location (gimple_location (def));
4981 DECL_ARTIFICIAL (vexpr) = 1;
4982 TREE_TYPE (vexpr) = TREE_TYPE (value);
4983 if (DECL_P (value))
4984 mode = DECL_MODE (value);
4985 else
4986 mode = TYPE_MODE (TREE_TYPE (value));
4987 DECL_MODE (vexpr) = mode;
4989 val = gen_rtx_VAR_LOCATION
4990 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
4992 emit_debug_insn (val);
4994 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
4996 if (!gimple_debug_bind_p (debugstmt))
4997 continue;
4999 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5000 SET_USE (use_p, vexpr);
5002 update_stmt (debugstmt);
5006 set_curr_insn_location (sloc);
5009 currently_expanding_gimple_stmt = stmt;
5011 /* Expand this statement, then evaluate the resulting RTL and
5012 fixup the CFG accordingly. */
5013 if (gimple_code (stmt) == GIMPLE_COND)
5015 new_bb = expand_gimple_cond (bb, stmt);
5016 if (new_bb)
5017 return new_bb;
5019 else if (gimple_debug_bind_p (stmt))
5021 location_t sloc = curr_insn_location ();
5022 gimple_stmt_iterator nsi = gsi;
5024 for (;;)
5026 tree var = gimple_debug_bind_get_var (stmt);
5027 tree value;
5028 rtx val;
5029 enum machine_mode mode;
5031 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5032 && TREE_CODE (var) != LABEL_DECL
5033 && !target_for_debug_bind (var))
5034 goto delink_debug_stmt;
5036 if (gimple_debug_bind_has_value_p (stmt))
5037 value = gimple_debug_bind_get_value (stmt);
5038 else
5039 value = NULL_TREE;
5041 last = get_last_insn ();
5043 set_curr_insn_location (gimple_location (stmt));
5045 if (DECL_P (var))
5046 mode = DECL_MODE (var);
5047 else
5048 mode = TYPE_MODE (TREE_TYPE (var));
5050 val = gen_rtx_VAR_LOCATION
5051 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5053 emit_debug_insn (val);
5055 if (dump_file && (dump_flags & TDF_DETAILS))
5057 /* We can't dump the insn with a TREE where an RTX
5058 is expected. */
5059 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5060 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5061 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5064 delink_debug_stmt:
5065 /* In order not to generate too many debug temporaries,
5066 we delink all uses of debug statements we already expanded.
5067 Therefore debug statements between definition and real
5068 use of TERed SSA names will continue to use the SSA name,
5069 and not be replaced with debug temps. */
5070 delink_stmt_imm_use (stmt);
5072 gsi = nsi;
5073 gsi_next (&nsi);
5074 if (gsi_end_p (nsi))
5075 break;
5076 stmt = gsi_stmt (nsi);
5077 if (!gimple_debug_bind_p (stmt))
5078 break;
5081 set_curr_insn_location (sloc);
5083 else if (gimple_debug_source_bind_p (stmt))
5085 location_t sloc = curr_insn_location ();
5086 tree var = gimple_debug_source_bind_get_var (stmt);
5087 tree value = gimple_debug_source_bind_get_value (stmt);
5088 rtx val;
5089 enum machine_mode mode;
5091 last = get_last_insn ();
5093 set_curr_insn_location (gimple_location (stmt));
5095 mode = DECL_MODE (var);
5097 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5098 VAR_INIT_STATUS_UNINITIALIZED);
5100 emit_debug_insn (val);
5102 if (dump_file && (dump_flags & TDF_DETAILS))
5104 /* We can't dump the insn with a TREE where an RTX
5105 is expected. */
5106 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5107 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5108 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5111 set_curr_insn_location (sloc);
5113 else
5115 if (is_gimple_call (stmt)
5116 && gimple_call_tail_p (stmt)
5117 && disable_tail_calls)
5118 gimple_call_set_tail (stmt, false);
5120 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
5122 bool can_fallthru;
5123 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
5124 if (new_bb)
5126 if (can_fallthru)
5127 bb = new_bb;
5128 else
5129 return new_bb;
5132 else
5134 def_operand_p def_p;
5135 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5137 if (def_p != NULL)
5139 /* Ignore this stmt if it is in the list of
5140 replaceable expressions. */
5141 if (SA.values
5142 && bitmap_bit_p (SA.values,
5143 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5144 continue;
5146 last = expand_gimple_stmt (stmt);
5147 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5152 currently_expanding_gimple_stmt = NULL;
5154 /* Expand implicit goto and convert goto_locus. */
5155 FOR_EACH_EDGE (e, ei, bb->succs)
5157 if (e->goto_locus != UNKNOWN_LOCATION)
5158 set_curr_insn_location (e->goto_locus);
5159 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5161 emit_jump (label_rtx_for_bb (e->dest));
5162 e->flags &= ~EDGE_FALLTHRU;
5166 /* Expanded RTL can create a jump in the last instruction of the block.
5167 This might later be assumed to be a jump to the successor and break edge insertion.
5168 We need to insert a dummy move to prevent this. PR41440. */
5169 if (single_succ_p (bb)
5170 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5171 && (last = get_last_insn ())
5172 && JUMP_P (last))
5174 rtx dummy = gen_reg_rtx (SImode);
5175 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5178 do_pending_stack_adjust ();
5180 /* Find the block tail. The last insn in the block is the insn
5181 before a barrier and/or table jump insn. */
5182 last = get_last_insn ();
5183 if (BARRIER_P (last))
5184 last = PREV_INSN (last);
5185 if (JUMP_TABLE_DATA_P (last))
5186 last = PREV_INSN (PREV_INSN (last));
5187 BB_END (bb) = last;
5189 update_bb_for_insn (bb);
5191 return bb;
5195 /* Create a basic block for initialization code. */
5197 static basic_block
5198 construct_init_block (void)
5200 basic_block init_block, first_block;
5201 edge e = NULL;
5202 int flags;
5204 /* Multiple entry points not supported yet. */
5205 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5206 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5207 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5208 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5209 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5211 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5213 /* When the entry edge points to the first basic block, we don't need a jump;
5214 otherwise we have to jump to the proper target. */
5215 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5217 tree label = gimple_block_label (e->dest);
5219 emit_jump (label_rtx (label));
5220 flags = 0;
5222 else
5223 flags = EDGE_FALLTHRU;
5225 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5226 get_last_insn (),
5227 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5228 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5229 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5230 if (current_loops && ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father)
5231 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5232 if (e)
5234 first_block = e->dest;
5235 redirect_edge_succ (e, init_block);
5236 e = make_edge (init_block, first_block, flags);
5238 else
5239 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5240 e->probability = REG_BR_PROB_BASE;
5241 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5243 update_bb_for_insn (init_block);
5244 return init_block;
5247 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5248 found in the block tree. */
5250 static void
5251 set_block_levels (tree block, int level)
5253 while (block)
5255 BLOCK_NUMBER (block) = level;
5256 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5257 block = BLOCK_CHAIN (block);
5261 /* Create a block containing landing pads and similar stuff. */
5263 static void
5264 construct_exit_block (void)
5266 rtx head = get_last_insn ();
5267 rtx end;
5268 basic_block exit_block;
5269 edge e, e2;
5270 unsigned ix;
5271 edge_iterator ei;
5272 rtx orig_end = BB_END (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5274 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5276 /* Make sure the locus is set to the end of the function, so that
5277 epilogue line numbers and warnings are set properly. */
5278 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5279 input_location = cfun->function_end_locus;
5281 /* Generate rtl for function exit. */
5282 expand_function_end ();
5284 end = get_last_insn ();
5285 if (head == end)
5286 return;
5287 /* While emitting the function end we could move the end of the last basic block. */
5289 BB_END (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb) = orig_end;
5290 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5291 head = NEXT_INSN (head);
5292 exit_block = create_basic_block (NEXT_INSN (head), end,
5293 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5294 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5295 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5296 if (current_loops && EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father)
5297 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5299 ix = 0;
5300 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5302 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5303 if (!(e->flags & EDGE_ABNORMAL))
5304 redirect_edge_succ (e, exit_block);
5305 else
5306 ix++;
5309 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5310 e->probability = REG_BR_PROB_BASE;
5311 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5312 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5313 if (e2 != e)
5315 e->count -= e2->count;
5316 exit_block->count -= e2->count;
5317 exit_block->frequency -= EDGE_FREQUENCY (e2);
5319 if (e->count < 0)
5320 e->count = 0;
5321 if (exit_block->count < 0)
5322 exit_block->count = 0;
5323 if (exit_block->frequency < 0)
5324 exit_block->frequency = 0;
5325 update_bb_for_insn (exit_block);
5328 /* Helper function for discover_nonconstant_array_refs.
5329 Look for ARRAY_REF nodes with non-constant indexes and mark them
5330 addressable. */
5332 static tree
5333 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5334 void *data ATTRIBUTE_UNUSED)
5336 tree t = *tp;
5338 if (IS_TYPE_OR_DECL_P (t))
5339 *walk_subtrees = 0;
5340 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5342 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5343 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5344 && (!TREE_OPERAND (t, 2)
5345 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5346 || (TREE_CODE (t) == COMPONENT_REF
5347 && (!TREE_OPERAND (t,2)
5348 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5349 || TREE_CODE (t) == BIT_FIELD_REF
5350 || TREE_CODE (t) == REALPART_EXPR
5351 || TREE_CODE (t) == IMAGPART_EXPR
5352 || TREE_CODE (t) == VIEW_CONVERT_EXPR
5353 || CONVERT_EXPR_P (t))
5354 t = TREE_OPERAND (t, 0);
5356 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5358 t = get_base_address (t);
5359 if (t && DECL_P (t)
5360 && DECL_MODE (t) != BLKmode)
5361 TREE_ADDRESSABLE (t) = 1;
5364 *walk_subtrees = 0;
5367 return NULL_TREE;
5370 /* RTL expansion is not able to compile array references with variable
5371 offsets for arrays stored in a single register. Discover such
5372 expressions and mark variables as addressable to avoid this
5373 scenario. */
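/* Hypothetical example: given "short v[2]; ... = v[i];" where V could
   otherwise live in a single register, the variable index I forces V
   to be marked TREE_ADDRESSABLE so it gets a stack slot the indexed
   access can address.  */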
5375 static void
5376 discover_nonconstant_array_refs (void)
5378 basic_block bb;
5379 gimple_stmt_iterator gsi;
5381 FOR_EACH_BB_FN (bb, cfun)
5382 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5384 gimple stmt = gsi_stmt (gsi);
5385 if (!is_gimple_debug (stmt))
5386 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
5390 /* This function sets crtl->args.internal_arg_pointer to a virtual
5391 register if DRAP is needed. The local register allocator will replace
5392 virtual_incoming_args_rtx with the virtual register. */
5394 static void
5395 expand_stack_alignment (void)
5397 rtx drap_rtx;
5398 unsigned int preferred_stack_boundary;
5400 if (! SUPPORTS_STACK_ALIGNMENT)
5401 return;
5403 if (cfun->calls_alloca
5404 || cfun->has_nonlocal_label
5405 || crtl->has_nonlocal_goto)
5406 crtl->need_drap = true;
5408 /* Call update_stack_boundary here again to update incoming stack
5409 boundary. It may set incoming stack alignment to a different
5410 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5411 use the minimum incoming stack alignment to check if it is OK
5412 to perform sibcall optimization since sibcall optimization will
5413 only align the outgoing stack to incoming stack boundary. */
5414 if (targetm.calls.update_stack_boundary)
5415 targetm.calls.update_stack_boundary ();
5417 /* The incoming stack frame has to be aligned at least at
5418 parm_stack_boundary. */
5419 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5421 /* Update crtl->stack_alignment_estimated and use it later to align
5422 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5423 exceptions since callgraph doesn't collect incoming stack alignment
5424 in this case. */
5425 if (cfun->can_throw_non_call_exceptions
5426 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5427 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5428 else
5429 preferred_stack_boundary = crtl->preferred_stack_boundary;
5430 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5431 crtl->stack_alignment_estimated = preferred_stack_boundary;
5432 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5433 crtl->stack_alignment_needed = preferred_stack_boundary;
5435 gcc_assert (crtl->stack_alignment_needed
5436 <= crtl->stack_alignment_estimated);
5438 crtl->stack_realign_needed
5439 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5440 crtl->stack_realign_tried = crtl->stack_realign_needed;
5442 crtl->stack_realign_processed = true;
5444 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5445 alignment. */
5446 gcc_assert (targetm.calls.get_drap_rtx != NULL);
5447 drap_rtx = targetm.calls.get_drap_rtx ();
5449 /* stack_realign_drap and drap_rtx must match. */
5450 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5452 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5453 if (NULL != drap_rtx)
5455 crtl->args.internal_arg_pointer = drap_rtx;
5457 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5458 needed. */
5459 fixup_tail_calls ();
5464 static void
5465 expand_main_function (void)
5467 #if (defined(INVOKE__main) \
5468 || (!defined(HAS_INIT_SECTION) \
5469 && !defined(INIT_SECTION_ASM_OP) \
5470 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5471 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5472 #endif
5476 /* Expand code to initialize the stack_protect_guard. This is invoked at
5477 the beginning of a function to be protected. */
5479 #ifndef HAVE_stack_protect_set
5480 # define HAVE_stack_protect_set 0
5481 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5482 #endif
5484 static void
5485 stack_protect_prologue (void)
5487 tree guard_decl = targetm.stack_protect_guard ();
5488 rtx x, y;
5490 x = expand_normal (crtl->stack_protect_guard);
5491 y = expand_normal (guard_decl);
5493 /* Allow the target to copy from Y to X without leaking Y into a
5494 register. */
5495 if (HAVE_stack_protect_set)
5497 rtx insn = gen_stack_protect_set (x, y);
5498 if (insn)
5500 emit_insn (insn);
5501 return;
5505 /* Otherwise do a straight move. */
5506 emit_move_insn (x, y);
5509 /* Translate the intermediate representation contained in the CFG
5510 from GIMPLE trees to RTL.
5512 We do conversion per basic block and preserve/update the tree CFG.
5513 This implies we have to do some magic as the CFG can simultaneously
5514 consist of basic blocks containing RTL and GIMPLE trees. This can
5515 confuse the CFG hooks, so be careful not to manipulate the CFG during
5516 the expansion. */
5518 static unsigned int
5519 gimple_expand_cfg (void)
5521 basic_block bb, init_block;
5522 sbitmap blocks;
5523 edge_iterator ei;
5524 edge e;
5525 rtx var_seq, var_ret_seq;
5526 unsigned i;
5528 timevar_push (TV_OUT_OF_SSA);
5529 rewrite_out_of_ssa (&SA);
5530 timevar_pop (TV_OUT_OF_SSA);
5531 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
5533 /* Make sure all values used by the optimization passes have sane
5534 defaults. */
5535 reg_renumber = 0;
5537 /* Some backends want to know that we are expanding to RTL. */
5538 currently_expanding_to_rtl = 1;
5539 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5540 free_dominance_info (CDI_DOMINATORS);
5542 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5544 insn_locations_init ();
5545 if (!DECL_IS_BUILTIN (current_function_decl))
5547 /* Eventually, all FEs should explicitly set function_start_locus. */
5548 if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
5549 set_curr_insn_location
5550 (DECL_SOURCE_LOCATION (current_function_decl));
5551 else
5552 set_curr_insn_location (cfun->function_start_locus);
5554 else
5555 set_curr_insn_location (UNKNOWN_LOCATION);
5556 prologue_location = curr_insn_location ();
5558 #ifdef INSN_SCHEDULING
5559 init_sched_attrs ();
5560 #endif
5562 /* Make sure first insn is a note even if we don't want linenums.
5563 This makes sure the first insn will never be deleted.
5564 Also, final expects a note to appear there. */
5565 emit_note (NOTE_INSN_DELETED);
5567 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5568 discover_nonconstant_array_refs ();
5570 targetm.expand_to_rtl_hook ();
5571 crtl->stack_alignment_needed = STACK_BOUNDARY;
5572 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
5573 crtl->stack_alignment_estimated = 0;
5574 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5575 cfun->cfg->max_jumptable_ents = 0;
5577 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
5578 of the function section at expansion time to predict the distance of calls. */
5579 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5581 /* Expand the variables recorded during gimple lowering. */
5582 timevar_push (TV_VAR_EXPAND);
5583 start_sequence ();
5585 var_ret_seq = expand_used_vars ();
5587 var_seq = get_insns ();
5588 end_sequence ();
5589 timevar_pop (TV_VAR_EXPAND);
5591 /* Honor stack protection warnings. */
5592 if (warn_stack_protect)
5594 if (cfun->calls_alloca)
5595 warning (OPT_Wstack_protector,
5596 "stack protector not protecting local variables: "
5597 "variable length buffer");
5598 if (has_short_buffer && !crtl->stack_protect_guard)
5599 warning (OPT_Wstack_protector,
5600 "stack protector not protecting function: "
5601 "all local arrays are less than %d bytes long",
5602 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
5605 /* Set up parameters and prepare for return, for the function. */
5606 expand_function_start (current_function_decl);
5608 /* If we emitted any instructions for setting up the variables,
5609 emit them before the FUNCTION_START note. */
5610 if (var_seq)
5612 emit_insn_before (var_seq, parm_birth_insn);
5614 /* In expand_function_end we'll insert the alloca save/restore
5615 before parm_birth_insn. We've just inserted an alloca call.
5616 Adjust the pointer to match. */
5617 parm_birth_insn = var_seq;
5620 /* Now that we also have the parameter RTXs, copy them over to our
5621 partitions. */
5622 for (i = 0; i < SA.map->num_partitions; i++)
5624 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
5626 if (TREE_CODE (var) != VAR_DECL
5627 && !SA.partition_to_pseudo[i])
5628 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
5629 gcc_assert (SA.partition_to_pseudo[i]);
5631 /* If this decl was marked as living in multiple places, reset
5632 this now to NULL. */
5633 if (DECL_RTL_IF_SET (var) == pc_rtx)
5634 SET_DECL_RTL (var, NULL);
5636 /* Some RTL parts really want to look at DECL_RTL(x) when x
5637 was a decl marked in REG_ATTR or MEM_ATTR. We could use
5638 SET_DECL_RTL here to make this available, but that would mean
5639 selecting one of the potentially many RTLs for one DECL. Instead
5640 of doing that we simply reset the MEM_EXPR of the RTL in question,
5641 then nobody can get at it and hence nobody can call DECL_RTL on it. */
5642 if (!DECL_RTL_SET_P (var))
5644 if (MEM_P (SA.partition_to_pseudo[i]))
5645 set_mem_expr (SA.partition_to_pseudo[i], NULL);
5649 /* If we have a class containing differently aligned pointers
5650 we need to merge those into the corresponding RTL pointer
5651 alignment. */
5652 for (i = 1; i < num_ssa_names; i++)
5654 tree name = ssa_name (i);
5655 int part;
5656 rtx r;
5658 if (!name
5659 /* We might have generated new SSA names in
5660 update_alias_info_with_stack_vars. They will have NULL
5661 defining statements, and won't be part of the partitioning,
5662 so ignore those. */
5663 || !SSA_NAME_DEF_STMT (name))
5664 continue;
5665 part = var_to_partition (SA.map, name);
5666 if (part == NO_PARTITION)
5667 continue;
5669 /* Adjust all partition members to get the underlying decl of
5670 the representative which we might have created in expand_one_var. */
5671 if (SSA_NAME_VAR (name) == NULL_TREE)
5673 tree leader = partition_to_var (SA.map, part);
5674 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
5675 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
5677 if (!POINTER_TYPE_P (TREE_TYPE (name)))
5678 continue;
5680 r = SA.partition_to_pseudo[part];
5681 if (REG_P (r))
5682 mark_reg_pointer (r, get_pointer_alignment (name));
5685 /* If this function is `main', emit a call to `__main'
5686 to run global initializers, etc. */
5687 if (DECL_NAME (current_function_decl)
5688 && MAIN_NAME_P (DECL_NAME (current_function_decl))
5689 && DECL_FILE_SCOPE_P (current_function_decl))
5690 expand_main_function ();
5692 /* Initialize the stack_protect_guard field. This must happen after the
5693 call to __main (if any) so that the external decl is initialized. */
5694 if (crtl->stack_protect_guard)
5695 stack_protect_prologue ();
5697 expand_phi_nodes (&SA);
5699 /* Register rtl specific functions for cfg. */
5700 rtl_register_cfg_hooks ();
5702 init_block = construct_init_block ();
5704 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
5705 remaining edges later. */
5706 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
5707 e->flags &= ~EDGE_EXECUTABLE;
5709 lab_rtx_for_bb = pointer_map_create ();
5710 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
5711 next_bb)
5712 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
5714 if (MAY_HAVE_DEBUG_INSNS)
5715 expand_debug_locations ();
5717 /* Free stuff we no longer need after GIMPLE optimizations. */
5718 free_dominance_info (CDI_DOMINATORS);
5719 free_dominance_info (CDI_POST_DOMINATORS);
5720 delete_tree_cfg_annotations ();
5722 timevar_push (TV_OUT_OF_SSA);
5723 finish_out_of_ssa (&SA);
5724 timevar_pop (TV_OUT_OF_SSA);
5726 timevar_push (TV_POST_EXPAND);
5727 /* We are no longer in SSA form. */
5728 cfun->gimple_df->in_ssa_p = false;
5729 if (current_loops)
5730 loops_state_clear (LOOP_CLOSED_SSA);
5732 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
5733 conservatively to true until they are all profile aware. */
5734 pointer_map_destroy (lab_rtx_for_bb);
5735 free_histograms ();
5737 construct_exit_block ();
5738 insn_locations_finalize ();
5740 if (var_ret_seq)
5742 rtx after = return_label;
5743 rtx next = NEXT_INSN (after);
5744 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
5745 after = next;
5746 emit_insn_after (var_ret_seq, after);
5749 /* Zap the tree EH table. */
5750 set_eh_throw_stmt_table (cfun, NULL);
5752 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
5753 to split edges, which edge insertions might do. */
5754 rebuild_jump_labels (get_insns ());
5756 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
5757 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
5759 edge e;
5760 edge_iterator ei;
5761 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5763 if (e->insns.r)
5765 rebuild_jump_labels_chain (e->insns.r);
5766 /* Put insns after parm birth, but before
5767 NOTE_INSNS_FUNCTION_BEG. */
5768 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
5769 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
5771 rtx insns = e->insns.r;
5772 e->insns.r = NULL_RTX;
5773 if (NOTE_P (parm_birth_insn)
5774 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
5775 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
5776 else
5777 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
5779 else
5780 commit_one_edge_insertion (e);
5782 else
5783 ei_next (&ei);
  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb,
                  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }

  blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();
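  /* Note: with every bit of BLOCKS set, find_many_sub_basic_blocks
     rescans the whole function and splits any block whose RTL stream
     now contains control flow of its own, which also rediscovers the
     abnormal edges removed above.  */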
  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (cfun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ???  We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;
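  /* Note: CONCATs are only a placeholder for complex values while
     expanding; with generating_concat_p cleared, gen_reg_rtx stops
     building them for complex modes.  */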
  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }
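  /* Note: get_containing_scope can step through BLOCKs and types on the
     way out, so the FUNCTION_DECL check is what narrows the walk to the
     enclosing functions themselves.  */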
  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function, before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (cfun->gimple_df->tm_restart)
    {
      htab_delete (cfun->gimple_df->tm_restart);
      cfun->gimple_df->tm_restart = NULL;
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();

  timevar_pop (TV_POST_EXPAND);

  return 0;
}
namespace {

const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx
    | PROP_gimple_lvec ), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  ( TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts ), /* todo_flags_start */
  0, /* todo_flags_finish */
};
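/* Note: requiring SSA plus the lowered-EH/complex/vector properties
   while providing PROP_rtl (and destroying PROP_ssa and PROP_trees) is
   what pins this pass to the GIMPLE-to-RTL boundary in the pass
   pipeline.  */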
class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return gimple_expand_cfg (); }

}; // class pass_expand

} // anon namespace

rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}
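/* Note: this factory follows the usual pass-registration pattern; the
   pass manager (driven by the pass list in passes.def) calls it once
   and takes ownership of the object it returns.  */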