/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "tree.h"
#include "stringpool.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "bitmap.h"
#include "pointer-set.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "sbitmap.h"
#include "cfgloop.h"
#include "regs.h" /* For reg_renumber.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "asan.h"
#include "tree-ssa-address.h"
#include "recog.h"
#include "output.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
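
/* Illustrative note (not in the original sources): for an SSA_NAME such
   as i_3, SSAVAR yields the underlying VAR_DECL i; for anything else it
   is the identity.  Code below uses it to query DECL_SIZE_UNIT,
   DECL_MODE and friends uniformly for both kinds of trees.  */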
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
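
/* Worked example of the rounding arithmetic above (illustrative numbers,
   not from any particular target): with FRAME_GROWS_DOWNWARD, frame_phase
   0, frame_offset -20, size 8 and align 16, we compute -20 - 8 = -28 and
   then -28 & -16 = -32, so the new slot occupies [-32, -24) and -32 is
   returned.  In two's complement, X & -A rounds X down to a multiple of
   the power-of-two A; (X + A - 1) & -A rounds it up, which is what the
   upward-growing branch uses.  */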
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
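
/* Note (editorial): conflicts are recorded symmetrically in both bitmaps
   here, which is why stack_var_conflict_p below only needs to test one
   direction of the relation.  */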
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v =
        (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = (size_t *)
                  pointer_map_contains (decl_to_stack_part, lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward, without also moving a dereference to
     it upwards.  But it's conservatively correct as a variable never can
     hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
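
/* Illustrative example of the resulting order (hypothetical entries): an
   object whose alignment exceeds MAX_SUPPORTED_STACK_ALIGNMENT sorts
   before all "small"-alignment ones; among the latter, a 64-byte object
   precedes an 8-byte one, ties fall back to alignment, and the final ID
   compare makes the order deterministic even though qsort itself is not
   a stable sort.  */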
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/
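
/* Note (editorial): the pairwise scan below is quadratic in the number of
   candidate stack variables, which is why defer_stack_allocation further
   down works to keep that number small.  */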
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
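      /* Note (editorial): OFFSET & -OFFSET below isolates the lowest set
         bit of the offset, i.e. the largest power of two dividing it; for
         example an offset of -24 (binary ...101000) yields 8, so a slot
         at that offset can be assumed 8-byte aligned relative to BASE.  */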
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed order; the highest-offset pairs come
     first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
            {
              HOST_WIDE_INT prev_offset = frame_offset;
              tree repr_decl = NULL_TREE;

              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));
              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}
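
/* Note (editorial): expand_used_vars below calls this routine several
   times, once per stack-protector/asan phase with a PRED selecting that
   phase and finally with PRED == NULL for everything left over; only the
   final, unpredicated pass performs the dynamic "large"-alignment
   allocation above.  */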
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  Similarly for
         SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8
static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
            len = max;
          else
            len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
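
/* Illustrative example (assuming the historical --param ssp-buffer-size
   default of 8): "char buf[4]" classifies as SPCT_HAS_SMALL_CHAR_ARRAY
   | SPCT_HAS_ARRAY, "char buf[64]" as SPCT_HAS_LARGE_CHAR_ARRAY |
   SPCT_HAS_ARRAY, and a struct containing either additionally carries
   SPCT_HAS_AGGREGATE.  */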
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}
/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = pointer_map_create ();

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}
/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}
/* Expand all variables used in the function.  */

static rtx
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  vec<tree> maybe_local_decls = vNULL;
  rtx var_end_seq = NULL_RTX;
  struct pointer_map_t *ssa_name_decls;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
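
  /* Illustrative example (hypothetical target values): if
     PREFERRED_STACK_BOUNDARY is 128 bits (16 bytes) and
     STARTING_FRAME_OFFSET is 8, then off is 8 and frame_phase becomes 8,
     satisfying (frame_offset + frame_phase) % 16 == 0 as described at the
     definition of frame_phase above.  */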
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  ssa_name_decls = pointer_map_create ();
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      /* Assign decls to each SSA name partition, share decls for partitions
         we could have coalesced (those with the same type).  */
      if (SSA_NAME_VAR (var) == NULL_TREE)
        {
          void **slot = pointer_map_insert (ssa_name_decls, TREE_TYPE (var));
          if (!*slot)
            *slot = (void *) create_tmp_reg (TREE_TYPE (var), NULL);
          replace_ssa_name_symbol (var, (tree) *slot);
        }

      /* Always allocate space for partitions based on VAR_DECLs.  But for
         those based on PARM_DECLs or RESULT_DECLs and which matter for the
         debug info, there is no need to do so if optimization is disabled
         because all the SSA_NAMEs based on these DECLs have been coalesced
         into a single partition, which is thus assigned the canonical RTL
         location of the DECLs.  If in_lto_p, we can't rely on optimize,
         a function could be compiled with -O1 -flto first and only the
         link performed at -O0.  */
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }
  pointer_map_destroy (ssa_name_decls);

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    FOR_EACH_LOCAL_DECL (cfun, i, var)
      if (!is_global_var (var))
        {
          tree var_type = TREE_TYPE (var);
          /* Examine local referenced variables that have their addresses
             taken, contain an array, or are arrays.  */
          if (TREE_CODE (var) == VAR_DECL
              && (TREE_CODE (var_type) == ARRAY_TYPE
                  || TREE_ADDRESSABLE (var)
                  || (RECORD_OR_UNION_TYPE_P (var_type)
                      && record_or_union_type_has_array_p (var_type))))
            {
              gen_stack_protect_signal = true;
              break;
            }
        }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* Expand variables not associated with any block now.  Those created by
         the optimizers could be live anywhere in the function.  Those that
         could possibly have been scoped originally and detached from their
         block will have their allocation deferred so we coalesce them with
         others when optimization is enabled.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            maybe_local_decls.safe_push (var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       |
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
          || cfun->calls_alloca || has_protected_decls)
        create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls)
        create_stack_guard ();
      break;

    default:
      ;
    }

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      struct stack_vars_data data;

      data.asan_vec = vNULL;
      data.asan_decl_vec = vNULL;
      data.asan_base = NULL_RTX;
      data.asan_alignb = 0;

      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1, &data);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2, &data);
        }

      if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
        /* Phase 3, any partitions that need asan protection
           in addition to phase 1 and 2.  */
        expand_stack_vars (asan_decl_phase_3, &data);

      if (!data.asan_vec.is_empty ())
        {
          HOST_WIDE_INT prev_offset = frame_offset;
          HOST_WIDE_INT offset, sz, redzonesz;
          redzonesz = ASAN_RED_ZONE_SIZE;
          sz = data.asan_vec[0] - prev_offset;
          if (data.asan_alignb > ASAN_RED_ZONE_SIZE
              && data.asan_alignb <= 4096
              && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
            redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
                         & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
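          /* Illustrative example (assuming ASAN_RED_ZONE_SIZE is 32 bytes):
             with asan_alignb 64 and sz 40, the expression above rounds
             40 + 32 up to the next multiple of 64 (here 128) and subtracts
             sz, growing the red zone to 88 bytes so the whole protected
             area stays 64-byte aligned.  */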
1853 offset
1854 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
1855 data.asan_vec.safe_push (prev_offset);
1856 data.asan_vec.safe_push (offset);
1857 /* Leave space for alignment if STRICT_ALIGNMENT. */
1858 if (STRICT_ALIGNMENT)
1859 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
1860 << ASAN_SHADOW_SHIFT)
1861 / BITS_PER_UNIT, 1);
1863 var_end_seq
1864 = asan_emit_stack_protection (virtual_stack_vars_rtx,
1865 data.asan_base,
1866 data.asan_alignb,
1867 data.asan_vec.address (),
1868 data.asan_decl_vec.address (),
1869 data.asan_vec.length ());
1872 expand_stack_vars (NULL, &data);
1874 data.asan_vec.release ();
1875 data.asan_decl_vec.release ();
1878 fini_vars_expansion ();
1880 /* If there were any artificial non-ignored vars without rtl
1881 found earlier, see if deferred stack allocation hasn't assigned
1882 rtl to them. */
1883 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
1885 rtx rtl = DECL_RTL_IF_SET (var);
1887 /* Keep artificial non-ignored vars in cfun->local_decls
1888 chain until instantiate_decls. */
1889 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1890 add_local_decl (cfun, var);
1892 maybe_local_decls.release ();
1894 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1895 if (STACK_ALIGNMENT_NEEDED)
1897 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1898 if (!FRAME_GROWS_DOWNWARD)
1899 frame_offset += align - 1;
1900 frame_offset &= -align;
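/* The mask rounds FRAME_OFFSET toward minus infinity, so for a
   downward-growing frame (negative offsets) it alone moves the offset
   away from the locals; for an upward-growing frame the ALIGN - 1 bump
   first makes the same mask round up instead. */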
1903 return var_end_seq;
1907 /* If we need to produce a detailed dump, print the tree representation
1908 for STMT to the dump file. SINCE is the last RTX after which the RTL
1909 generated for STMT should have been appended. */
1911 static void
1912 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1914 if (dump_file && (dump_flags & TDF_DETAILS))
1916 fprintf (dump_file, "\n;; ");
1917 print_gimple_stmt (dump_file, stmt, 0,
1918 TDF_SLIM | (dump_flags & TDF_LINENO));
1919 fprintf (dump_file, "\n");
1921 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1925 /* Maps the blocks that do not contain tree labels to rtx labels. */
1927 static struct pointer_map_t *lab_rtx_for_bb;
1929 /* Returns the label_rtx expression for a label starting basic block BB. */
1931 static rtx
1932 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1934 gimple_stmt_iterator gsi;
1935 tree lab;
1936 gimple lab_stmt;
1937 void **elt;
1939 if (bb->flags & BB_RTL)
1940 return block_label (bb);
1942 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1943 if (elt)
1944 return (rtx) *elt;
1946 /* Find the tree label if it is present. */
1948 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1950 lab_stmt = gsi_stmt (gsi);
1951 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1952 break;
1954 lab = gimple_label_label (lab_stmt);
1955 if (DECL_NONLOCAL (lab))
1956 break;
1958 return label_rtx (lab);
1961 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1962 *elt = gen_label_rtx ();
1963 return (rtx) *elt;
1967 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1968 of a basic block where we just expanded the conditional at the end,
1969 possibly clean up the CFG and instruction sequence. LAST is the
1970 last instruction before the just emitted jump sequence. */
1972 static void
1973 maybe_cleanup_end_of_block (edge e, rtx last)
1975 /* Special case: when jumpif decides that the condition is
1976 trivial it emits an unconditional jump (and the necessary
1977 barrier). But we still have two edges, the fallthru one is
1978 wrong. purge_dead_edges would clean this up later. Unfortunately
1979 we have to insert insns (and split edges) before
1980 find_many_sub_basic_blocks and hence before purge_dead_edges.
1981 But splitting edges might create new blocks which depend on the
1982 fact that if there are two edges there's no barrier. So the
1983 barrier would get lost and verify_flow_info would ICE. Instead
1984 of auditing all edge splitters to care for the barrier (which
1985 normally isn't there in a cleaned CFG), fix it here. */
1986 if (BARRIER_P (get_last_insn ()))
1988 rtx insn;
1989 remove_edge (e);
1990 /* Now, we have a single successor block, if we have insns to
1991 insert on the remaining edge we potentially will insert
1992 them at the end of this block (if inserting into the dest block isn't feasible)
1993 in order to avoid splitting the edge. This insertion will take
1994 place in front of the last jump. But we might have emitted
1995 multiple jumps (conditional and one unconditional) to the
1996 same destination. Inserting in front of the last one then
1997 is a problem. See PR 40021. We fix this by deleting all
1998 jumps except the last unconditional one. */
1999 insn = PREV_INSN (get_last_insn ());
2000 /* Make sure we have an unconditional jump. Otherwise we're
2001 confused. */
2002 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2003 for (insn = PREV_INSN (insn); insn != last;)
2005 insn = PREV_INSN (insn);
2006 if (JUMP_P (NEXT_INSN (insn)))
2008 if (!any_condjump_p (NEXT_INSN (insn)))
2010 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2011 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2013 delete_insn (NEXT_INSN (insn));
2019 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2020 Returns a new basic block if we've terminated the current basic
2021 block and created a new one. */
2023 static basic_block
2024 expand_gimple_cond (basic_block bb, gimple stmt)
2026 basic_block new_bb, dest;
2027 edge new_edge;
2028 edge true_edge;
2029 edge false_edge;
2030 rtx last2, last;
2031 enum tree_code code;
2032 tree op0, op1;
2034 code = gimple_cond_code (stmt);
2035 op0 = gimple_cond_lhs (stmt);
2036 op1 = gimple_cond_rhs (stmt);
2037 /* We're sometimes presented with such code:
2038 D.123_1 = x < y;
2039 if (D.123_1 != 0)
2041 This would expand to two comparisons which then later might
2042 be cleaned up by combine. But some pattern matchers like if-conversion
2043 work better when there's only one compare, so make up for this
2044 here as a special exception if TER would have made the same change. */
2045 if (SA.values
2046 && TREE_CODE (op0) == SSA_NAME
2047 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2048 && TREE_CODE (op1) == INTEGER_CST
2049 && ((gimple_cond_code (stmt) == NE_EXPR
2050 && integer_zerop (op1))
2051 || (gimple_cond_code (stmt) == EQ_EXPR
2052 && integer_onep (op1)))
2053 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2055 gimple second = SSA_NAME_DEF_STMT (op0);
2056 if (gimple_code (second) == GIMPLE_ASSIGN)
2058 enum tree_code code2 = gimple_assign_rhs_code (second);
2059 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2061 code = code2;
2062 op0 = gimple_assign_rhs1 (second);
2063 op1 = gimple_assign_rhs2 (second);
2065 /* If jumps are cheap, turn some more codes into
2066 jumpy sequences. */
2067 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
2069 if ((code2 == BIT_AND_EXPR
2070 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2071 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2072 || code2 == TRUTH_AND_EXPR)
2074 code = TRUTH_ANDIF_EXPR;
2075 op0 = gimple_assign_rhs1 (second);
2076 op1 = gimple_assign_rhs2 (second);
2078 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2080 code = TRUTH_ORIF_EXPR;
2081 op0 = gimple_assign_rhs1 (second);
2082 op1 = gimple_assign_rhs2 (second);
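/* In other words, when branches are cheap a one-bit a & b (with b not a
   constant) or an a | b feeding the condition expands like a && b or
   a || b, i.e. as two short-circuit jumps instead of materializing the
   bit operation. */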
2088 last2 = last = get_last_insn ();
2090 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2091 set_curr_insn_location (gimple_location (stmt));
2093 /* These flags have no purpose in RTL land. */
2094 true_edge->flags &= ~EDGE_TRUE_VALUE;
2095 false_edge->flags &= ~EDGE_FALSE_VALUE;
2097 /* We can either have a pure conditional jump with one fallthru edge or
2098 a two-way jump that needs to be decomposed into two basic blocks. */
2099 if (false_edge->dest == bb->next_bb)
2101 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2102 true_edge->probability);
2103 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2104 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2105 set_curr_insn_location (true_edge->goto_locus);
2106 false_edge->flags |= EDGE_FALLTHRU;
2107 maybe_cleanup_end_of_block (false_edge, last);
2108 return NULL;
2110 if (true_edge->dest == bb->next_bb)
2112 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2113 false_edge->probability);
2114 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2115 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2116 set_curr_insn_location (false_edge->goto_locus);
2117 true_edge->flags |= EDGE_FALLTHRU;
2118 maybe_cleanup_end_of_block (true_edge, last);
2119 return NULL;
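/* Otherwise neither successor is the physical fallthrough block: emit a
   conditional jump to the true destination, then an unconditional jump
   to the false one; the unconditional part is split off into a new basic
   block below. */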
2122 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2123 true_edge->probability);
2124 last = get_last_insn ();
2125 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2126 set_curr_insn_location (false_edge->goto_locus);
2127 emit_jump (label_rtx_for_bb (false_edge->dest));
2129 BB_END (bb) = last;
2130 if (BARRIER_P (BB_END (bb)))
2131 BB_END (bb) = PREV_INSN (BB_END (bb));
2132 update_bb_for_insn (bb);
2134 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2135 dest = false_edge->dest;
2136 redirect_edge_succ (false_edge, new_bb);
2137 false_edge->flags |= EDGE_FALLTHRU;
2138 new_bb->count = false_edge->count;
2139 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2140 if (current_loops && bb->loop_father)
2141 add_bb_to_loop (new_bb, bb->loop_father);
2142 new_edge = make_edge (new_bb, dest, 0);
2143 new_edge->probability = REG_BR_PROB_BASE;
2144 new_edge->count = new_bb->count;
2145 if (BARRIER_P (BB_END (new_bb)))
2146 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2147 update_bb_for_insn (new_bb);
2149 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2151 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2153 set_curr_insn_location (true_edge->goto_locus);
2154 true_edge->goto_locus = curr_insn_location ();
2157 return new_bb;
2160 /* Mark all calls that can have a transaction restart. */
2162 static void
2163 mark_transaction_restart_calls (gimple stmt)
2165 struct tm_restart_node dummy;
2166 void **slot;
2168 if (!cfun->gimple_df->tm_restart)
2169 return;
2171 dummy.stmt = stmt;
2172 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2173 if (slot)
2175 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2176 tree list = n->label_or_list;
2177 rtx insn;
2179 for (insn = next_real_insn (get_last_insn ());
2180 !CALL_P (insn);
2181 insn = next_real_insn (insn))
2182 continue;
2184 if (TREE_CODE (list) == LABEL_DECL)
2185 add_reg_note (insn, REG_TM, label_rtx (list));
2186 else
2187 for (; list ; list = TREE_CHAIN (list))
2188 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2192 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2193 statement STMT. */
2195 static void
2196 expand_call_stmt (gimple stmt)
2198 tree exp, decl, lhs;
2199 bool builtin_p;
2200 size_t i;
2202 if (gimple_call_internal_p (stmt))
2204 expand_internal_call (stmt);
2205 return;
2208 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
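/* Annotation: the three extra slots hold the CALL_EXPR's fixed operands
   (the internal VL_EXP operand count, CALL_EXPR_FN and
   CALL_EXPR_STATIC_CHAIN); the call arguments follow them. */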
2210 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2211 decl = gimple_call_fndecl (stmt);
2212 builtin_p = decl && DECL_BUILT_IN (decl);
2214 /* If this is not a builtin function, the function type through which the
2215 call is made may be different from the type of the function. */
2216 if (!builtin_p)
2217 CALL_EXPR_FN (exp)
2218 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2219 CALL_EXPR_FN (exp));
2221 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2222 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2224 for (i = 0; i < gimple_call_num_args (stmt); i++)
2226 tree arg = gimple_call_arg (stmt, i);
2227 gimple def;
2228 /* TER forwards taken addresses into arguments of builtin functions so we
2229 have a chance to infer more correct alignment information. See PR39954. */
2230 if (builtin_p
2231 && TREE_CODE (arg) == SSA_NAME
2232 && (def = get_gimple_for_ssa_name (arg))
2233 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2234 arg = gimple_assign_rhs1 (def);
2235 CALL_EXPR_ARG (exp, i) = arg;
2238 if (gimple_has_side_effects (stmt))
2239 TREE_SIDE_EFFECTS (exp) = 1;
2241 if (gimple_call_nothrow_p (stmt))
2242 TREE_NOTHROW (exp) = 1;
2244 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2245 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2246 if (decl
2247 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2248 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2249 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2250 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2251 else
2252 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2253 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2254 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2256 /* Ensure RTL is created for debug args. */
2257 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2259 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2260 unsigned int ix;
2261 tree dtemp;
2263 if (debug_args)
2264 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2266 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2267 expand_debug_expr (dtemp);
2271 lhs = gimple_call_lhs (stmt);
2272 if (lhs)
2273 expand_assignment (lhs, exp, false);
2274 else
2275 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2277 mark_transaction_restart_calls (stmt);
2281 /* Generate RTL for an asm statement (explicit assembler code).
2282 STRING is a STRING_CST node containing the assembler code text,
2283 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2284 insn is volatile; don't optimize it. */
2286 static void
2287 expand_asm_loc (tree string, int vol, location_t locus)
2289 rtx body;
2291 if (TREE_CODE (string) == ADDR_EXPR)
2292 string = TREE_OPERAND (string, 0);
2294 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2295 ggc_strdup (TREE_STRING_POINTER (string)),
2296 locus);
2298 MEM_VOLATILE_P (body) = vol;
2300 emit_insn (body);
2303 /* Return the number of times character C occurs in string S. */
2304 static int
2305 n_occurrences (int c, const char *s)
2307 int n = 0;
2308 while (*s)
2309 n += (*s++ == c);
2310 return n;
2313 /* A subroutine of expand_asm_operands. Check that all operands have
2314 the same number of alternatives. Return true if so. */
2316 static bool
2317 check_operand_nalternatives (tree outputs, tree inputs)
2319 if (outputs || inputs)
2321 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2322 int nalternatives
2323 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
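/* E.g. an output constraint of "=r,m" contains one comma and thus
   describes two alternatives. */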
2324 tree next = inputs;
2326 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2328 error ("too many alternatives in %<asm%>");
2329 return false;
2332 tmp = outputs;
2333 while (tmp)
2335 const char *constraint
2336 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2338 if (n_occurrences (',', constraint) != nalternatives)
2340 error ("operand constraints for %<asm%> differ "
2341 "in number of alternatives");
2342 return false;
2345 if (TREE_CHAIN (tmp))
2346 tmp = TREE_CHAIN (tmp);
2347 else
2348 tmp = next, next = 0;
2352 return true;
2355 /* Check for overlap between registers marked in CLOBBERED_REGS and
2356 anything inappropriate in T. Emit an error and return true if an
2357 overlap is found, false if everything is ok. */
2359 static bool
2360 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2362 /* Conflicts between asm-declared register variables and the clobber
2363 list are not allowed. */
2364 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2366 if (overlap)
2368 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2369 DECL_NAME (overlap));
2371 /* Reset registerness to stop multiple errors emitted for a single
2372 variable. */
2373 DECL_REGISTER (overlap) = 0;
2374 return true;
2377 return false;
2380 /* Generate RTL for an asm statement with arguments.
2381 STRING is the instruction template.
2382 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2383 Each output or input has an expression in the TREE_VALUE and
2384 a tree list in TREE_PURPOSE which in turn contains a constraint
2385 name in TREE_VALUE (or NULL_TREE) and a constraint string
2386 in TREE_PURPOSE.
2387 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2388 that is clobbered by this insn.
2390 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2391 should be the fallthru basic block of the asm goto.
2393 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2394 Some elements of OUTPUTS may be replaced with trees representing temporary
2395 values. The caller should copy those temporary values to the originally
2396 specified lvalues.
2398 VOL nonzero means the insn is volatile; don't optimize it. */
2400 static void
2401 expand_asm_operands (tree string, tree outputs, tree inputs,
2402 tree clobbers, tree labels, basic_block fallthru_bb,
2403 int vol, location_t locus)
2405 rtvec argvec, constraintvec, labelvec;
2406 rtx body;
2407 int ninputs = list_length (inputs);
2408 int noutputs = list_length (outputs);
2409 int nlabels = list_length (labels);
2410 int ninout;
2411 int nclobbers;
2412 HARD_REG_SET clobbered_regs;
2413 int clobber_conflict_found = 0;
2414 tree tail;
2415 tree t;
2416 int i;
2417 /* Vector of RTX's of evaluated output operands. */
2418 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2419 int *inout_opnum = XALLOCAVEC (int, noutputs);
2420 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2421 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
2422 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2423 int old_generating_concat_p = generating_concat_p;
2424 rtx fallthru_label = NULL_RTX;
2426 /* An ASM with no outputs needs to be treated as volatile, for now. */
2427 if (noutputs == 0)
2428 vol = 1;
2430 if (! check_operand_nalternatives (outputs, inputs))
2431 return;
2433 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2435 /* Collect constraints. */
2436 i = 0;
2437 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2438 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2439 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2440 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2442 /* Sometimes we wish to automatically clobber registers across an asm.
2443 Case in point is when the i386 backend moved from cc0 to a hard reg --
2444 maintaining source-level compatibility means automatically clobbering
2445 the flags register. */
2446 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2448 /* Count the number of meaningful clobbered registers, ignoring what
2449 we would ignore later. */
2450 nclobbers = 0;
2451 CLEAR_HARD_REG_SET (clobbered_regs);
2452 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2454 const char *regname;
2455 int nregs;
2457 if (TREE_VALUE (tail) == error_mark_node)
2458 return;
2459 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2461 i = decode_reg_name_and_count (regname, &nregs);
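/* decode_reg_name_and_count yields -2 for an unknown name, -3 for `cc'
   and -4 for `memory' (cf. the clobber emission further down); the
   memory clobber still occupies a slot in the clobber vector. */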
2462 if (i == -4)
2463 ++nclobbers;
2464 else if (i == -2)
2465 error ("unknown register name %qs in %<asm%>", regname);
2467 /* Mark clobbered registers. */
2468 if (i >= 0)
2470 int reg;
2472 for (reg = i; reg < i + nregs; reg++)
2474 ++nclobbers;
2476 /* Clobbering the PIC register is an error. */
2477 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2479 error ("PIC register clobbered by %qs in %<asm%>", regname);
2480 return;
2483 SET_HARD_REG_BIT (clobbered_regs, reg);
2488 /* First pass over inputs and outputs checks validity and sets
2489 mark_addressable if needed. */
2491 ninout = 0;
2492 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2494 tree val = TREE_VALUE (tail);
2495 tree type = TREE_TYPE (val);
2496 const char *constraint;
2497 bool is_inout;
2498 bool allows_reg;
2499 bool allows_mem;
2501 /* If there's an erroneous arg, emit no insn. */
2502 if (type == error_mark_node)
2503 return;
2505 /* Try to parse the output constraint. If that fails, there's
2506 no point in going further. */
2507 constraint = constraints[i];
2508 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2509 &allows_mem, &allows_reg, &is_inout))
2510 return;
2512 if (! allows_reg
2513 && (allows_mem
2514 || is_inout
2515 || (DECL_P (val)
2516 && REG_P (DECL_RTL (val))
2517 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2518 mark_addressable (val);
2520 if (is_inout)
2521 ninout++;
2524 ninputs += ninout;
2525 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
2527 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2528 return;
2531 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2533 bool allows_reg, allows_mem;
2534 const char *constraint;
2536 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2537 would get VOIDmode and that could cause a crash in reload. */
2538 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2539 return;
2541 constraint = constraints[i + noutputs];
2542 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2543 constraints, &allows_mem, &allows_reg))
2544 return;
2546 if (! allows_reg && allows_mem)
2547 mark_addressable (TREE_VALUE (tail));
2550 /* Second pass evaluates arguments. */
2552 /* Make sure stack is consistent for asm goto. */
2553 if (nlabels > 0)
2554 do_pending_stack_adjust ();
2556 ninout = 0;
2557 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2559 tree val = TREE_VALUE (tail);
2560 tree type = TREE_TYPE (val);
2561 bool is_inout;
2562 bool allows_reg;
2563 bool allows_mem;
2564 rtx op;
2565 bool ok;
2567 ok = parse_output_constraint (&constraints[i], i, ninputs,
2568 noutputs, &allows_mem, &allows_reg,
2569 &is_inout);
2570 gcc_assert (ok);
2572 /* If an output operand is not a decl or indirect ref and our constraint
2573 allows a register, make a temporary to act as an intermediate.
2574 Make the asm insn write into that, then our caller will copy it to
2575 the real output operand. Likewise for promoted variables. */
2577 generating_concat_p = 0;
2579 real_output_rtx[i] = NULL_RTX;
2580 if ((TREE_CODE (val) == INDIRECT_REF
2581 && allows_mem)
2582 || (DECL_P (val)
2583 && (allows_mem || REG_P (DECL_RTL (val)))
2584 && ! (REG_P (DECL_RTL (val))
2585 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2586 || ! allows_reg
2587 || is_inout)
2589 op = expand_expr (val, NULL_RTX, VOIDmode,
2590 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2591 if (MEM_P (op))
2592 op = validize_mem (op);
2594 if (! allows_reg && !MEM_P (op))
2595 error ("output number %d not directly addressable", i);
2596 if ((! allows_mem && MEM_P (op))
2597 || GET_CODE (op) == CONCAT)
2599 real_output_rtx[i] = op;
2600 op = gen_reg_rtx (GET_MODE (op));
2601 if (is_inout)
2602 emit_move_insn (op, real_output_rtx[i]);
2605 else
2607 op = assign_temp (type, 0, 1);
2608 op = validize_mem (op);
2609 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2610 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2611 TREE_VALUE (tail) = make_tree (type, op);
2613 output_rtx[i] = op;
2615 generating_concat_p = old_generating_concat_p;
2617 if (is_inout)
2619 inout_mode[ninout] = TYPE_MODE (type);
2620 inout_opnum[ninout++] = i;
2623 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2624 clobber_conflict_found = 1;
2627 /* Make vectors for the expression-rtx, constraint strings,
2628 and named operands. */
2630 argvec = rtvec_alloc (ninputs);
2631 constraintvec = rtvec_alloc (ninputs);
2632 labelvec = rtvec_alloc (nlabels);
2634 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2635 : GET_MODE (output_rtx[0])),
2636 ggc_strdup (TREE_STRING_POINTER (string)),
2637 empty_string, 0, argvec, constraintvec,
2638 labelvec, locus);
2640 MEM_VOLATILE_P (body) = vol;
2642 /* Eval the inputs and put them into ARGVEC.
2643 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2645 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2647 bool allows_reg, allows_mem;
2648 const char *constraint;
2649 tree val, type;
2650 rtx op;
2651 bool ok;
2653 constraint = constraints[i + noutputs];
2654 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2655 constraints, &allows_mem, &allows_reg);
2656 gcc_assert (ok);
2658 generating_concat_p = 0;
2660 val = TREE_VALUE (tail);
2661 type = TREE_TYPE (val);
2662 /* EXPAND_INITIALIZER will not generate code for valid initializer
2663 constants, but will still generate code for other types of operand.
2664 This is the behavior we want for constant constraints. */
2665 op = expand_expr (val, NULL_RTX, VOIDmode,
2666 allows_reg ? EXPAND_NORMAL
2667 : allows_mem ? EXPAND_MEMORY
2668 : EXPAND_INITIALIZER);
2670 /* Never pass a CONCAT to an ASM. */
2671 if (GET_CODE (op) == CONCAT)
2672 op = force_reg (GET_MODE (op), op);
2673 else if (MEM_P (op))
2674 op = validize_mem (op);
2676 if (asm_operand_ok (op, constraint, NULL) <= 0)
2678 if (allows_reg && TYPE_MODE (type) != BLKmode)
2679 op = force_reg (TYPE_MODE (type), op);
2680 else if (!allows_mem)
2681 warning (0, "asm operand %d probably doesn%'t match constraints",
2682 i + noutputs);
2683 else if (MEM_P (op))
2685 /* We won't recognize either volatile memory or memory
2686 with a queued address as an available memory_operand
2687 at this point. Ignore it: clearly this *is* a memory. */
2689 else
2690 gcc_unreachable ();
2693 generating_concat_p = old_generating_concat_p;
2694 ASM_OPERANDS_INPUT (body, i) = op;
2696 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2697 = gen_rtx_ASM_INPUT_loc (TYPE_MODE (type),
2698 ggc_strdup (constraints[i + noutputs]),
2699 locus);
2701 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2702 clobber_conflict_found = 1;
2705 /* Protect all the operands from the queue now that they have all been
2706 evaluated. */
2708 generating_concat_p = 0;
2710 /* For in-out operands, copy output rtx to input rtx. */
2711 for (i = 0; i < ninout; i++)
2713 int j = inout_opnum[i];
2714 char buffer[16];
2716 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2717 = output_rtx[j];
2719 sprintf (buffer, "%d", j);
2720 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2721 = gen_rtx_ASM_INPUT_loc (inout_mode[i], ggc_strdup (buffer), locus);
2724 /* Copy labels to the vector. */
2725 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2727 rtx r;
2728 /* If asm goto has any labels in the fallthru basic block, use
2729 a label that we emit immediately after the asm goto. Expansion
2730 may insert further instructions into the same basic block after
2731 asm goto and if we don't do this, insertion of instructions on
2732 the fallthru edge might misbehave. See PR58670. */
2733 if (fallthru_bb
2734 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2736 if (fallthru_label == NULL_RTX)
2737 fallthru_label = gen_label_rtx ();
2738 r = fallthru_label;
2740 else
2741 r = label_rtx (TREE_VALUE (tail));
2742 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2745 generating_concat_p = old_generating_concat_p;
2747 /* Now, for each output, construct an rtx
2748 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2749 ARGVEC CONSTRAINTS OPNAMES))
2750 If there is more than one, put them inside a PARALLEL. */
2752 if (nlabels > 0 && nclobbers == 0)
2754 gcc_assert (noutputs == 0);
2755 emit_jump_insn (body);
2757 else if (noutputs == 0 && nclobbers == 0)
2759 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2760 emit_insn (body);
2762 else if (noutputs == 1 && nclobbers == 0)
2764 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2765 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2767 else
2769 rtx obody = body;
2770 int num = noutputs;
2772 if (num == 0)
2773 num = 1;
2775 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2777 /* For each output operand, store a SET. */
2778 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2780 XVECEXP (body, 0, i)
2781 = gen_rtx_SET (VOIDmode,
2782 output_rtx[i],
2783 gen_rtx_ASM_OPERANDS
2784 (GET_MODE (output_rtx[i]),
2785 ggc_strdup (TREE_STRING_POINTER (string)),
2786 ggc_strdup (constraints[i]),
2787 i, argvec, constraintvec, labelvec, locus));
2789 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2792 /* If there are no outputs (but there are some clobbers)
2793 store the bare ASM_OPERANDS into the PARALLEL. */
2795 if (i == 0)
2796 XVECEXP (body, 0, i++) = obody;
2798 /* Store (clobber REG) for each clobbered register specified. */
2800 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2802 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2803 int reg, nregs;
2804 int j = decode_reg_name_and_count (regname, &nregs);
2805 rtx clobbered_reg;
2807 if (j < 0)
2809 if (j == -3) /* `cc', which is not a register */
2810 continue;
2812 if (j == -4) /* `memory', don't cache memory across asm */
2814 XVECEXP (body, 0, i++)
2815 = gen_rtx_CLOBBER (VOIDmode,
2816 gen_rtx_MEM
2817 (BLKmode,
2818 gen_rtx_SCRATCH (VOIDmode)));
2819 continue;
2822 /* Ignore unknown register, error already signaled. */
2823 continue;
2826 for (reg = j; reg < j + nregs; reg++)
2828 /* Use QImode since that's guaranteed to clobber just
2829 * one reg. */
2830 clobbered_reg = gen_rtx_REG (QImode, reg);
2832 /* Do sanity check for overlap between clobbers and
2833 respectively input and outputs that hasn't been
2834 handled. Such overlap should have been detected and
2835 reported above. */
2836 if (!clobber_conflict_found)
2838 int opno;
2840 /* We test the old body (obody) contents to avoid
2841 tripping over the under-construction body. */
2842 for (opno = 0; opno < noutputs; opno++)
2843 if (reg_overlap_mentioned_p (clobbered_reg,
2844 output_rtx[opno]))
2845 internal_error
2846 ("asm clobber conflict with output operand");
2848 for (opno = 0; opno < ninputs - ninout; opno++)
2849 if (reg_overlap_mentioned_p (clobbered_reg,
2850 ASM_OPERANDS_INPUT (obody,
2851 opno)))
2852 internal_error
2853 ("asm clobber conflict with input operand");
2856 XVECEXP (body, 0, i++)
2857 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2861 if (nlabels > 0)
2862 emit_jump_insn (body);
2863 else
2864 emit_insn (body);
2867 if (fallthru_label)
2868 emit_label (fallthru_label);
2870 /* For any outputs that needed reloading into registers, spill them
2871 back to where they belong. */
2872 for (i = 0; i < noutputs; ++i)
2873 if (real_output_rtx[i])
2874 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2876 crtl->has_asm_statement = 1;
2877 free_temp_slots ();
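/* Expand a GIMPLE_ASM statement STMT: rebuild the tree lists that
   expand_asm_operands expects, emit the asm, and copy any intermediate
   outputs back into the lvalues the user named. */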
2881 static void
2882 expand_asm_stmt (gimple stmt)
2884 int noutputs;
2885 tree outputs, tail, t;
2886 tree *o;
2887 size_t i, n;
2888 const char *s;
2889 tree str, out, in, cl, labels;
2890 location_t locus = gimple_location (stmt);
2891 basic_block fallthru_bb = NULL;
2893 /* Meh... convert the gimple asm operands into real tree lists.
2894 Eventually we should make all routines work on the vectors instead
2895 of relying on TREE_CHAIN. */
2896 out = NULL_TREE;
2897 n = gimple_asm_noutputs (stmt);
2898 if (n > 0)
2900 t = out = gimple_asm_output_op (stmt, 0);
2901 for (i = 1; i < n; i++)
2902 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
2905 in = NULL_TREE;
2906 n = gimple_asm_ninputs (stmt);
2907 if (n > 0)
2909 t = in = gimple_asm_input_op (stmt, 0);
2910 for (i = 1; i < n; i++)
2911 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
2914 cl = NULL_TREE;
2915 n = gimple_asm_nclobbers (stmt);
2916 if (n > 0)
2918 t = cl = gimple_asm_clobber_op (stmt, 0);
2919 for (i = 1; i < n; i++)
2920 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
2923 labels = NULL_TREE;
2924 n = gimple_asm_nlabels (stmt);
2925 if (n > 0)
2927 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
2928 if (fallthru)
2929 fallthru_bb = fallthru->dest;
2930 t = labels = gimple_asm_label_op (stmt, 0);
2931 for (i = 1; i < n; i++)
2932 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
2935 s = gimple_asm_string (stmt);
2936 str = build_string (strlen (s), s);
2938 if (gimple_asm_input_p (stmt))
2940 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
2941 return;
2944 outputs = out;
2945 noutputs = gimple_asm_noutputs (stmt);
2946 /* o[I] is the place where output number I should be written. */
2947 o = (tree *) alloca (noutputs * sizeof (tree));
2949 /* Record the contents of OUTPUTS before it is modified. */
2950 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2951 o[i] = TREE_VALUE (tail);
2953 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
2954 OUTPUTS some trees for where the values were actually stored. */
2955 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
2956 gimple_asm_volatile_p (stmt), locus);
2958 /* Copy all the intermediate outputs into the specified outputs. */
2959 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2961 if (o[i] != TREE_VALUE (tail))
2963 expand_assignment (o[i], TREE_VALUE (tail), false);
2964 free_temp_slots ();
2966 /* Restore the original value so that it's correct the next
2967 time we expand this function. */
2968 TREE_VALUE (tail) = o[i];
2973 /* Emit code to jump to the address
2974 specified by the pointer expression EXP. */
2976 static void
2977 expand_computed_goto (tree exp)
2979 rtx x = expand_normal (exp);
2981 x = convert_memory_address (Pmode, x);
2983 do_pending_stack_adjust ();
2984 emit_indirect_jump (x);
2987 /* Generate RTL code for a `goto' statement with target label LABEL.
2988 LABEL should be a LABEL_DECL tree node that was or will later be
2989 defined with `expand_label'. */
2991 static void
2992 expand_goto (tree label)
2994 #ifdef ENABLE_CHECKING
2995 /* Check for a nonlocal goto to a containing function. Should have
2996 gotten translated to __builtin_nonlocal_goto. */
2997 tree context = decl_function_context (label);
2998 gcc_assert (!context || context == current_function_decl);
2999 #endif
3001 emit_jump (label_rtx (label));
3004 /* Output a return with no value. */
3006 static void
3007 expand_null_return_1 (void)
3009 clear_pending_stack_adjust ();
3010 do_pending_stack_adjust ();
3011 emit_jump (return_label);
3014 /* Generate RTL to return from the current function, with no value.
3015 (That is, we do not do anything about returning any value.) */
3017 void
3018 expand_null_return (void)
3020 /* If this function was declared to return a value, but we
3021 didn't, clobber the return registers so that they are not
3022 propagated live to the rest of the function. */
3023 clobber_return_register ();
3025 expand_null_return_1 ();
3028 /* Generate RTL to return from the current function, with value VAL. */
3030 static void
3031 expand_value_return (rtx val)
3033 /* Copy the value to the return location unless it's already there. */
3035 tree decl = DECL_RESULT (current_function_decl);
3036 rtx return_reg = DECL_RTL (decl);
3037 if (return_reg != val)
3039 tree funtype = TREE_TYPE (current_function_decl);
3040 tree type = TREE_TYPE (decl);
3041 int unsignedp = TYPE_UNSIGNED (type);
3042 enum machine_mode old_mode = DECL_MODE (decl);
3043 enum machine_mode mode;
3044 if (DECL_BY_REFERENCE (decl))
3045 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3046 else
3047 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3049 if (mode != old_mode)
3050 val = convert_modes (mode, old_mode, val, unsignedp);
3052 if (GET_CODE (return_reg) == PARALLEL)
3053 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3054 else
3055 emit_move_insn (return_reg, val);
3058 expand_null_return_1 ();
3061 /* Generate RTL to evaluate the expression RETVAL and return it
3062 from the current function. */
3064 static void
3065 expand_return (tree retval)
3067 rtx result_rtl;
3068 rtx val = 0;
3069 tree retval_rhs;
3071 /* If function wants no value, give it none. */
3072 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3074 expand_normal (retval);
3075 expand_null_return ();
3076 return;
3079 if (retval == error_mark_node)
3081 /* Treat this like a return of no value from a function that
3082 returns a value. */
3083 expand_null_return ();
3084 return;
3086 else if ((TREE_CODE (retval) == MODIFY_EXPR
3087 || TREE_CODE (retval) == INIT_EXPR)
3088 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3089 retval_rhs = TREE_OPERAND (retval, 1);
3090 else
3091 retval_rhs = retval;
3093 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3095 /* If we are returning the RESULT_DECL, then the value has already
3096 been stored into it, so we don't have to do anything special. */
3097 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3098 expand_value_return (result_rtl);
3100 /* If the result is an aggregate that is being returned in one (or more)
3101 registers, load the registers here. */
3103 else if (retval_rhs != 0
3104 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3105 && REG_P (result_rtl))
3107 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3108 if (val)
3110 /* Use the mode of the result value on the return register. */
3111 PUT_MODE (result_rtl, GET_MODE (val));
3112 expand_value_return (val);
3114 else
3115 expand_null_return ();
3117 else if (retval_rhs != 0
3118 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3119 && (REG_P (result_rtl)
3120 || (GET_CODE (result_rtl) == PARALLEL)))
3122 /* Compute the return value into a temporary (usually a pseudo reg). */
3123 val
3124 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3125 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3126 val = force_not_mem (val);
3127 expand_value_return (val);
3129 else
3131 /* No hard reg used; calculate value into hard return reg. */
3132 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3133 expand_value_return (result_rtl);
3137 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3138 STMT that doesn't require special handling for outgoing edges. That
3139 is, no tailcalls and no GIMPLE_COND. */
3141 static void
3142 expand_gimple_stmt_1 (gimple stmt)
3144 tree op0;
3146 set_curr_insn_location (gimple_location (stmt));
3148 switch (gimple_code (stmt))
3150 case GIMPLE_GOTO:
3151 op0 = gimple_goto_dest (stmt);
3152 if (TREE_CODE (op0) == LABEL_DECL)
3153 expand_goto (op0);
3154 else
3155 expand_computed_goto (op0);
3156 break;
3157 case GIMPLE_LABEL:
3158 expand_label (gimple_label_label (stmt));
3159 break;
3160 case GIMPLE_NOP:
3161 case GIMPLE_PREDICT:
3162 break;
3163 case GIMPLE_SWITCH:
3164 expand_case (stmt);
3165 break;
3166 case GIMPLE_ASM:
3167 expand_asm_stmt (stmt);
3168 break;
3169 case GIMPLE_CALL:
3170 expand_call_stmt (stmt);
3171 break;
3173 case GIMPLE_RETURN:
3174 op0 = gimple_return_retval (stmt);
3176 if (op0 && op0 != error_mark_node)
3178 tree result = DECL_RESULT (current_function_decl);
3180 /* If we are not returning the current function's RESULT_DECL,
3181 build an assignment to it. */
3182 if (op0 != result)
3184 /* I believe that a function's RESULT_DECL is unique. */
3185 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3187 /* ??? We'd like to use simply expand_assignment here,
3188 but this fails if the value is of BLKmode but the return
3189 decl is a register. expand_return has special handling
3190 for this combination, which eventually should move
3191 to common code. See comments there. Until then, let's
3192 build a modify expression :-/ */
3193 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3194 result, op0);
3197 if (!op0)
3198 expand_null_return ();
3199 else
3200 expand_return (op0);
3201 break;
3203 case GIMPLE_ASSIGN:
3205 tree lhs = gimple_assign_lhs (stmt);
3207 /* Tree expand used to fiddle with |= and &= of two bitfield
3208 COMPONENT_REFs here. This can't happen with gimple; the LHS
3209 of binary assigns must be a gimple reg. */
3211 if (TREE_CODE (lhs) != SSA_NAME
3212 || get_gimple_rhs_class (gimple_expr_code (stmt))
3213 == GIMPLE_SINGLE_RHS)
3215 tree rhs = gimple_assign_rhs1 (stmt);
3216 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3217 == GIMPLE_SINGLE_RHS);
3218 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3219 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3220 if (TREE_CLOBBER_P (rhs))
3221 /* This is a clobber marking that this LHS is going
3222 out of scope. */
3224 else
3225 expand_assignment (lhs, rhs,
3226 gimple_assign_nontemporal_move_p (stmt));
3228 else
3230 rtx target, temp;
3231 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
3232 struct separate_ops ops;
3233 bool promoted = false;
3235 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3236 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3237 promoted = true;
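/* TARGET being a promoted SUBREG means the variable lives in a wider
   register holding an already extended value, so stores must go through
   convert_move below to keep the extension valid. */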
3239 ops.code = gimple_assign_rhs_code (stmt);
3240 ops.type = TREE_TYPE (lhs);
3241 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3243 case GIMPLE_TERNARY_RHS:
3244 ops.op2 = gimple_assign_rhs3 (stmt);
3245 /* Fallthru */
3246 case GIMPLE_BINARY_RHS:
3247 ops.op1 = gimple_assign_rhs2 (stmt);
3248 /* Fallthru */
3249 case GIMPLE_UNARY_RHS:
3250 ops.op0 = gimple_assign_rhs1 (stmt);
3251 break;
3252 default:
3253 gcc_unreachable ();
3255 ops.location = gimple_location (stmt);
3257 /* If we want to use a nontemporal store, force the value into a
3258 register first. If we store into a promoted register,
3259 don't directly expand to target. */
3260 temp = nontemporal || promoted ? NULL_RTX : target;
3261 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3262 EXPAND_NORMAL);
3264 if (temp == target)
3266 else if (promoted)
3268 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
3269 /* If TEMP is a VOIDmode constant, use convert_modes to make
3270 sure that we properly convert it. */
3271 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3273 temp = convert_modes (GET_MODE (target),
3274 TYPE_MODE (ops.type),
3275 temp, unsignedp);
3276 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3277 GET_MODE (target), temp, unsignedp);
3280 convert_move (SUBREG_REG (target), temp, unsignedp);
3282 else if (nontemporal && emit_storent_insn (target, temp))
3284 else
3286 temp = force_operand (temp, target);
3287 if (temp != target)
3288 emit_move_insn (target, temp);
3292 break;
3294 default:
3295 gcc_unreachable ();
3299 /* Expand one gimple statement STMT and return the last RTL instruction
3300 before any of the newly generated ones.
3302 In addition to generating the necessary RTL instructions this also
3303 sets REG_EH_REGION notes if necessary and sets the current source
3304 location for diagnostics. */
3306 static rtx
3307 expand_gimple_stmt (gimple stmt)
3309 location_t saved_location = input_location;
3310 rtx last = get_last_insn ();
3311 int lp_nr;
3313 gcc_assert (cfun);
3315 /* We need to save and restore the current source location so that errors
3316 discovered during expansion are emitted with the right location. But
3317 it would be better if the diagnostic routines used the source location
3318 embedded in the tree nodes rather than globals. */
3319 if (gimple_has_location (stmt))
3320 input_location = gimple_location (stmt);
3322 expand_gimple_stmt_1 (stmt);
3324 /* Free any temporaries used to evaluate this statement. */
3325 free_temp_slots ();
3327 input_location = saved_location;
3329 /* Mark all insns that may trap. */
3330 lp_nr = lookup_stmt_eh_lp (stmt);
3331 if (lp_nr)
3333 rtx insn;
3334 for (insn = next_real_insn (last); insn;
3335 insn = next_real_insn (insn))
3337 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3338 /* If we want exceptions for non-call insns, any
3339 may_trap_p instruction may throw. */
3340 && GET_CODE (PATTERN (insn)) != CLOBBER
3341 && GET_CODE (PATTERN (insn)) != USE
3342 && insn_could_throw_p (insn))
3343 make_reg_eh_region_note (insn, 0, lp_nr);
3347 return last;
3350 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3351 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3352 generated a tail call (something that might be denied by the ABI
3353 rules governing the call; see calls.c).
3355 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3356 can still reach the rest of BB. The case here is __builtin_sqrt,
3357 where the NaN result goes through the external function (with a
3358 tailcall) and the normal result happens via a sqrt instruction. */
3360 static basic_block
3361 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
3363 rtx last2, last;
3364 edge e;
3365 edge_iterator ei;
3366 int probability;
3367 gcov_type count;
3369 last2 = last = expand_gimple_stmt (stmt);
3371 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3372 if (CALL_P (last) && SIBLING_CALL_P (last))
3373 goto found;
3375 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3377 *can_fallthru = true;
3378 return NULL;
3380 found:
3381 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3382 Any instructions emitted here are about to be deleted. */
3383 do_pending_stack_adjust ();
3385 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3386 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3387 EH or abnormal edges, we shouldn't have created a tail call in
3388 the first place. So it seems to me we should just be removing
3389 all edges here, or redirecting the existing fallthru edge to
3390 the exit block. */
3392 probability = 0;
3393 count = 0;
3395 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3397 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3399 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3401 e->dest->count -= e->count;
3402 e->dest->frequency -= EDGE_FREQUENCY (e);
3403 if (e->dest->count < 0)
3404 e->dest->count = 0;
3405 if (e->dest->frequency < 0)
3406 e->dest->frequency = 0;
3408 count += e->count;
3409 probability += e->probability;
3410 remove_edge (e);
3412 else
3413 ei_next (&ei);
3416 /* This is somewhat ugly: the call_expr expander often emits instructions
3417 after the sibcall (to perform the function return). These confuse the
3418 find_many_sub_basic_blocks code, so we need to get rid of them. */
3419 last = NEXT_INSN (last);
3420 gcc_assert (BARRIER_P (last));
3422 *can_fallthru = false;
3423 while (NEXT_INSN (last))
3425 /* For instance, the sqrt builtin expander expands an if with a
3426 sibcall in the then-branch and a label for the else-branch. */
3427 if (LABEL_P (NEXT_INSN (last)))
3429 *can_fallthru = true;
3430 break;
3432 delete_insn (NEXT_INSN (last));
3435 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3436 | EDGE_SIBCALL);
3437 e->probability += probability;
3438 e->count += count;
3439 BB_END (bb) = last;
3440 update_bb_for_insn (bb);
3442 if (NEXT_INSN (last))
3444 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3446 last = BB_END (bb);
3447 if (BARRIER_P (last))
3448 BB_END (bb) = PREV_INSN (last);
3451 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3453 return bb;
3456 /* Return the difference between the floor and the truncated result of
3457 a signed division by OP1 with remainder MOD. */
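/* For example -7 / 3 truncates to -2 with remainder -1; op1 / mod is
   3 / -1 < 0, so the adjustment is -1 and the floor result is -3. */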
3458 static rtx
3459 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3461 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3462 return gen_rtx_IF_THEN_ELSE
3463 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3464 gen_rtx_IF_THEN_ELSE
3465 (mode, gen_rtx_LT (BImode,
3466 gen_rtx_DIV (mode, op1, mod),
3467 const0_rtx),
3468 constm1_rtx, const0_rtx),
3469 const0_rtx);
3472 /* Return the difference between the ceil and the truncated result of
3473 a signed division by OP1 with remainder MOD. */
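/* For example 7 / 3 truncates to 2 with remainder 1; op1 / mod is
   3 / 1 > 0, so the adjustment is 1 and the ceiling result is 3. */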
3474 static rtx
3475 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3477 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3478 return gen_rtx_IF_THEN_ELSE
3479 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3480 gen_rtx_IF_THEN_ELSE
3481 (mode, gen_rtx_GT (BImode,
3482 gen_rtx_DIV (mode, op1, mod),
3483 const0_rtx),
3484 const1_rtx, const0_rtx),
3485 const0_rtx);
3488 /* Return the difference between the ceil and the truncated result of
3489 an unsigned division by OP1 with remainder MOD. */
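/* For example 9 / 4 truncates to 2 with remainder 1 != 0, so the
   adjustment is 1, giving the ceiling 3. */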
3490 static rtx
3491 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3493 /* (mod != 0 ? 1 : 0) */
3494 return gen_rtx_IF_THEN_ELSE
3495 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3496 const1_rtx, const0_rtx);
3499 /* Return the difference between the rounded and the truncated result
3500 of a signed division by OP1 with remainder MOD. Halfway cases are
3501 rounded away from zero, rather than to the nearest even number. */
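/* The guard abs (mod) >= abs (op1) - abs (mod) is just
   2 * abs (mod) >= abs (op1), i.e. the remainder is at least half the
   divisor. E.g. -7 / 2 truncates to -3 with remainder -1; 1 >= 2 - 1
   and op1 / mod = 2 / -1 < 0, so the adjustment -1 rounds -3.5 to -4. */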
3502 static rtx
3503 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3505 /* (abs (mod) >= abs (op1) - abs (mod)
3506 ? (op1 / mod > 0 ? 1 : -1)
3507 : 0) */
3508 return gen_rtx_IF_THEN_ELSE
3509 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3510 gen_rtx_MINUS (mode,
3511 gen_rtx_ABS (mode, op1),
3512 gen_rtx_ABS (mode, mod))),
3513 gen_rtx_IF_THEN_ELSE
3514 (mode, gen_rtx_GT (BImode,
3515 gen_rtx_DIV (mode, op1, mod),
3516 const0_rtx),
3517 const1_rtx, constm1_rtx),
3518 const0_rtx);
3521 /* Return the difference between the rounded and the truncated result
3522 of an unsigned division by OP1 with remainder MOD. Halfway cases
3523 are rounded away from zero, rather than to the nearest even
3524 number. */
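/* For example 7 / 4 truncates to 1 with remainder 3; 3 >= 4 - 3, so the
   adjustment is 1, rounding 1.75 up to 2. */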
3525 static rtx
3526 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3528 /* (mod >= op1 - mod ? 1 : 0) */
3529 return gen_rtx_IF_THEN_ELSE
3530 (mode, gen_rtx_GE (BImode, mod,
3531 gen_rtx_MINUS (mode, op1, mod)),
3532 const1_rtx, const0_rtx);
3535 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3536 any rtl. */
3538 static rtx
3539 convert_debug_memory_address (enum machine_mode mode, rtx x,
3540 addr_space_t as)
3542 enum machine_mode xmode = GET_MODE (x);
3544 #ifndef POINTERS_EXTEND_UNSIGNED
3545 gcc_assert (mode == Pmode
3546 || mode == targetm.addr_space.address_mode (as));
3547 gcc_assert (xmode == mode || xmode == VOIDmode);
3548 #else
3549 rtx temp;
3551 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3553 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3554 return x;
3556 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3557 x = simplify_gen_subreg (mode, x, xmode,
3558 subreg_lowpart_offset
3559 (mode, xmode));
3560 else if (POINTERS_EXTEND_UNSIGNED > 0)
3561 x = gen_rtx_ZERO_EXTEND (mode, x);
3562 else if (!POINTERS_EXTEND_UNSIGNED)
3563 x = gen_rtx_SIGN_EXTEND (mode, x);
3564 else
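/* POINTERS_EXTEND_UNSIGNED < 0: the target extends pointers with a
   ptr_extend operation that debug info cannot express, so look for
   forms of X we can pass through unchanged. */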
3566 switch (GET_CODE (x))
3568 case SUBREG:
3569 if ((SUBREG_PROMOTED_VAR_P (x)
3570 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3571 || (GET_CODE (SUBREG_REG (x)) == PLUS
3572 && REG_P (XEXP (SUBREG_REG (x), 0))
3573 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3574 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3575 && GET_MODE (SUBREG_REG (x)) == mode)
3576 return SUBREG_REG (x);
3577 break;
3578 case LABEL_REF:
3579 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
3580 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3581 return temp;
3582 case SYMBOL_REF:
3583 temp = shallow_copy_rtx (x);
3584 PUT_MODE (temp, mode);
3585 return temp;
3586 case CONST:
3587 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3588 if (temp)
3589 temp = gen_rtx_CONST (mode, temp);
3590 return temp;
3591 case PLUS:
3592 case MINUS:
3593 if (CONST_INT_P (XEXP (x, 1)))
3595 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3596 if (temp)
3597 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3599 break;
3600 default:
3601 break;
3603 /* Don't know how to express ptr_extend as an operation in debug info. */
3604 return NULL;
3606 #endif /* POINTERS_EXTEND_UNSIGNED */
3608 return x;
3611 /* Return an RTX equivalent to the value of the parameter DECL. */
3613 static rtx
3614 expand_debug_parm_decl (tree decl)
3616 rtx incoming = DECL_INCOMING_RTL (decl);
3618 if (incoming
3619 && GET_MODE (incoming) != BLKmode
3620 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3621 || (MEM_P (incoming)
3622 && REG_P (XEXP (incoming, 0))
3623 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3625 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3627 #ifdef HAVE_window_save
3628 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3629 If the target machine has an explicit window save instruction, the
3630 actual entry value is the corresponding OUTGOING_REGNO instead. */
3631 if (REG_P (incoming)
3632 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3633 incoming
3634 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3635 OUTGOING_REGNO (REGNO (incoming)), 0);
3636 else if (MEM_P (incoming))
3638 rtx reg = XEXP (incoming, 0);
3639 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3641 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3642 incoming = replace_equiv_address_nv (incoming, reg);
3644 else
3645 incoming = copy_rtx (incoming);
3647 #endif
3649 ENTRY_VALUE_EXP (rtl) = incoming;
3650 return rtl;
3653 if (incoming
3654 && GET_MODE (incoming) != BLKmode
3655 && !TREE_ADDRESSABLE (decl)
3656 && MEM_P (incoming)
3657 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3658 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3659 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3660 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3661 return copy_rtx (incoming);
3663 return NULL_RTX;
3666 /* Return an RTX equivalent to the value of the tree expression EXP. */
3668 static rtx
3669 expand_debug_expr (tree exp)
3671 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3672 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3673 enum machine_mode inner_mode = VOIDmode;
3674 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3675 addr_space_t as;
3677 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3679 case tcc_expression:
3680 switch (TREE_CODE (exp))
3682 case COND_EXPR:
3683 case DOT_PROD_EXPR:
3684 case WIDEN_MULT_PLUS_EXPR:
3685 case WIDEN_MULT_MINUS_EXPR:
3686 case FMA_EXPR:
3687 goto ternary;
3689 case TRUTH_ANDIF_EXPR:
3690 case TRUTH_ORIF_EXPR:
3691 case TRUTH_AND_EXPR:
3692 case TRUTH_OR_EXPR:
3693 case TRUTH_XOR_EXPR:
3694 goto binary;
3696 case TRUTH_NOT_EXPR:
3697 goto unary;
3699 default:
3700 break;
3702 break;
3704 ternary:
3705 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3706 if (!op2)
3707 return NULL_RTX;
3708 /* Fall through. */
3710 binary:
3711 case tcc_binary:
3712 case tcc_comparison:
3713 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3714 if (!op1)
3715 return NULL_RTX;
3716 /* Fall through. */
3718 unary:
3719 case tcc_unary:
3720 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3721 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3722 if (!op0)
3723 return NULL_RTX;
3724 break;
3726 case tcc_type:
3727 case tcc_statement:
3728 gcc_unreachable ();
3730 case tcc_constant:
3731 case tcc_exceptional:
3732 case tcc_declaration:
3733 case tcc_reference:
3734 case tcc_vl_exp:
3735 break;
3738 switch (TREE_CODE (exp))
3740 case STRING_CST:
3741 if (!lookup_constant_def (exp))
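/* A CONST_STRING carries a plain C string, so it cannot represent a
   string with embedded or missing terminating NULs; give up on those. */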
3743 if (strlen (TREE_STRING_POINTER (exp)) + 1
3744 != (size_t) TREE_STRING_LENGTH (exp))
3745 return NULL_RTX;
3746 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3747 op0 = gen_rtx_MEM (BLKmode, op0);
3748 set_mem_attributes (op0, exp, 0);
3749 return op0;
3751 /* Fall through... */
3753 case INTEGER_CST:
3754 case REAL_CST:
3755 case FIXED_CST:
3756 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3757 return op0;
3759 case COMPLEX_CST:
3760 gcc_assert (COMPLEX_MODE_P (mode));
3761 op0 = expand_debug_expr (TREE_REALPART (exp));
3762 op1 = expand_debug_expr (TREE_IMAGPART (exp));
3763 return gen_rtx_CONCAT (mode, op0, op1);
3765 case DEBUG_EXPR_DECL:
3766 op0 = DECL_RTL_IF_SET (exp);
3768 if (op0)
3769 return op0;
3771 op0 = gen_rtx_DEBUG_EXPR (mode);
3772 DEBUG_EXPR_TREE_DECL (op0) = exp;
3773 SET_DECL_RTL (exp, op0);
3775 return op0;
3777 case VAR_DECL:
3778 case PARM_DECL:
3779 case FUNCTION_DECL:
3780 case LABEL_DECL:
3781 case CONST_DECL:
3782 case RESULT_DECL:
3783 op0 = DECL_RTL_IF_SET (exp);
3785 /* This decl was probably optimized away. */
3786 if (!op0)
3788 if (TREE_CODE (exp) != VAR_DECL
3789 || DECL_EXTERNAL (exp)
3790 || !TREE_STATIC (exp)
3791 || !DECL_NAME (exp)
3792 || DECL_HARD_REGISTER (exp)
3793 || DECL_IN_CONSTANT_POOL (exp)
3794 || mode == VOIDmode)
3795 return NULL;
3797 op0 = make_decl_rtl_for_debug (exp);
3798 if (!MEM_P (op0)
3799 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
3800 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
3801 return NULL;
3803 else
3804 op0 = copy_rtx (op0);
3806 if (GET_MODE (op0) == BLKmode
3807 /* If op0 is not BLKmode but MODE is BLKmode, adjust_mode
3808 below would ICE. While it is likely a FE bug,
3809 try to be robust here. See PR43166. */
3810 || mode == BLKmode
3811 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
3813 gcc_assert (MEM_P (op0));
3814 op0 = adjust_address_nv (op0, mode, 0);
3815 return op0;
3818 /* Fall through. */
3820 adjust_mode:
3821 case PAREN_EXPR:
3822 case NOP_EXPR:
3823 case CONVERT_EXPR:
3825 inner_mode = GET_MODE (op0);
3827 if (mode == inner_mode)
3828 return op0;
3830 if (inner_mode == VOIDmode)
3832 if (TREE_CODE (exp) == SSA_NAME)
3833 inner_mode = TYPE_MODE (TREE_TYPE (exp));
3834 else
3835 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3836 if (mode == inner_mode)
3837 return op0;
3840 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3842 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3843 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3844 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3845 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3846 else
3847 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3849 else if (FLOAT_MODE_P (mode))
3851 gcc_assert (TREE_CODE (exp) != SSA_NAME);
3852 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3853 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
3854 else
3855 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
3857 else if (FLOAT_MODE_P (inner_mode))
3859 if (unsignedp)
3860 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3861 else
3862 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3864 else if (CONSTANT_P (op0)
3865 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
3866 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3867 subreg_lowpart_offset (mode,
3868 inner_mode));
3869 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
3870 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
3871 : unsignedp)
3872 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3873 else
3874 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3876 return op0;
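/* A sketch of the conversions above, under assumed modes: widening
   an SImode debug value to DImode yields (zero_extend:DI ...) or
   (sign_extend:DI ...) depending on signedness; narrowing DImode
   to SImode just takes the lowpart subreg.  No insns are emitted
   either way -- these are all simplify_* calls.  */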
3879 case MEM_REF:
3880 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3882 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
3883 TREE_OPERAND (exp, 0),
3884 TREE_OPERAND (exp, 1));
3885 if (newexp)
3886 return expand_debug_expr (newexp);
3888 /* FALLTHROUGH */
3889 case INDIRECT_REF:
3890 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3891 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3892 if (!op0)
3893 return NULL;
3895 if (TREE_CODE (exp) == MEM_REF)
3897 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3898 || (GET_CODE (op0) == PLUS
3899 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
3900 /* (mem (debug_implicit_ptr)) might confuse aliasing.
3901 Instead just use get_inner_reference. */
3902 goto component_ref;
3904 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3905 if (!op1 || !CONST_INT_P (op1))
3906 return NULL;
3908 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
3911 if (POINTER_TYPE_P (TREE_TYPE (exp)))
3912 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
3913 else
3914 as = ADDR_SPACE_GENERIC;
3916 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3917 op0, as);
3918 if (op0 == NULL_RTX)
3919 return NULL;
3921 op0 = gen_rtx_MEM (mode, op0);
3922 set_mem_attributes (op0, exp, 0);
3923 if (TREE_CODE (exp) == MEM_REF
3924 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3925 set_mem_expr (op0, NULL_TREE);
3926 set_mem_addr_space (op0, as);
3928 return op0;
3930 case TARGET_MEM_REF:
3931 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
3932 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
3933 return NULL;
3935 op0 = expand_debug_expr
3936 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
3937 if (!op0)
3938 return NULL;
3940 if (POINTER_TYPE_P (TREE_TYPE (exp)))
3941 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
3942 else
3943 as = ADDR_SPACE_GENERIC;
3945 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3946 op0, as);
3947 if (op0 == NULL_RTX)
3948 return NULL;
3950 op0 = gen_rtx_MEM (mode, op0);
3952 set_mem_attributes (op0, exp, 0);
3953 set_mem_addr_space (op0, as);
3955 return op0;
3957 component_ref:
3958 case ARRAY_REF:
3959 case ARRAY_RANGE_REF:
3960 case COMPONENT_REF:
3961 case BIT_FIELD_REF:
3962 case REALPART_EXPR:
3963 case IMAGPART_EXPR:
3964 case VIEW_CONVERT_EXPR:
3966 enum machine_mode mode1;
3967 HOST_WIDE_INT bitsize, bitpos;
3968 tree offset;
3969 int volatilep = 0;
3970 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3971 &mode1, &unsignedp, &volatilep, false);
3972 rtx orig_op0;
3974 if (bitsize == 0)
3975 return NULL;
3977 orig_op0 = op0 = expand_debug_expr (tem);
3979 if (!op0)
3980 return NULL;
3982 if (offset)
3984 enum machine_mode addrmode, offmode;
3986 if (!MEM_P (op0))
3987 return NULL;
3989 op0 = XEXP (op0, 0);
3990 addrmode = GET_MODE (op0);
3991 if (addrmode == VOIDmode)
3992 addrmode = Pmode;
3994 op1 = expand_debug_expr (offset);
3995 if (!op1)
3996 return NULL;
3998 offmode = GET_MODE (op1);
3999 if (offmode == VOIDmode)
4000 offmode = TYPE_MODE (TREE_TYPE (offset));
4002 if (addrmode != offmode)
4003 op1 = simplify_gen_subreg (addrmode, op1, offmode,
4004 subreg_lowpart_offset (addrmode,
4005 offmode));
4007 /* Don't use offset_address here; we don't need a
4008 recognizable address and we don't want to generate
4009 code. */
4010 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4011 op0, op1));
4014 if (MEM_P (op0))
4016 if (mode1 == VOIDmode)
4017 /* Bitfield. */
4018 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4019 if (bitpos >= BITS_PER_UNIT)
4021 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4022 bitpos %= BITS_PER_UNIT;
4024 else if (bitpos < 0)
4026 HOST_WIDE_INT units
4027 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4028 op0 = adjust_address_nv (op0, mode1, units);
4029 bitpos += units * BITS_PER_UNIT;
4031 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4032 op0 = adjust_address_nv (op0, mode, 0);
4033 else if (GET_MODE (op0) != mode1)
4034 op0 = adjust_address_nv (op0, mode1, 0);
4035 else
4036 op0 = copy_rtx (op0);
4037 if (op0 == orig_op0)
4038 op0 = shallow_copy_rtx (op0);
4039 set_mem_attributes (op0, exp, 0);
4042 if (bitpos == 0 && mode == GET_MODE (op0))
4043 return op0;
4045 if (bitpos < 0)
4046 return NULL;
4048 if (GET_MODE (op0) == BLKmode)
4049 return NULL;
4051 if ((bitpos % BITS_PER_UNIT) == 0
4052 && bitsize == GET_MODE_BITSIZE (mode1))
4054 enum machine_mode opmode = GET_MODE (op0);
4056 if (opmode == VOIDmode)
4057 opmode = TYPE_MODE (TREE_TYPE (tem));
4059 /* This condition may hold if we're expanding the address
4060 right past the end of an array that turned out not to
4061 be addressable (i.e., the address was only computed in
4062 debug stmts). The gen_subreg below would rightfully
4063 crash, and the address doesn't really exist, so just
4064 drop it. */
4065 if (bitpos >= GET_MODE_BITSIZE (opmode))
4066 return NULL;
4068 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4069 return simplify_gen_subreg (mode, op0, opmode,
4070 bitpos / BITS_PER_UNIT);
4073 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4074 && TYPE_UNSIGNED (TREE_TYPE (exp))
4075 ? SIGN_EXTRACT
4076 : ZERO_EXTRACT, mode,
4077 GET_MODE (op0) != VOIDmode
4078 ? GET_MODE (op0)
4079 : TYPE_MODE (TREE_TYPE (tem)),
4080 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4083 case ABS_EXPR:
4084 return simplify_gen_unary (ABS, mode, op0, mode);
4086 case NEGATE_EXPR:
4087 return simplify_gen_unary (NEG, mode, op0, mode);
4089 case BIT_NOT_EXPR:
4090 return simplify_gen_unary (NOT, mode, op0, mode);
4092 case FLOAT_EXPR:
4093 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4094 0)))
4095 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4096 inner_mode);
4098 case FIX_TRUNC_EXPR:
4099 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4100 inner_mode);
4102 case POINTER_PLUS_EXPR:
4103 /* For the rare target where pointers are not the same size as
4104 size_t, we need to check for mis-matched modes and correct
4105 the addend. */
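/* (For instance, under assumed target modes: with DImode pointers
   and SImode sizetype, OP1 is sign-extended from SImode to DImode
   below before forming the PLUS.)  */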
4106 if (op0 && op1
4107 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4108 && GET_MODE (op0) != GET_MODE (op1))
4110 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4111 /* If OP0 is a partial mode, then we must truncate, even if it has
4112 the same bitsize as OP1, as GCC's representation of partial modes
4113 is opaque. */
4114 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4115 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4116 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4117 GET_MODE (op1));
4118 else
4119 /* We always sign-extend, regardless of the signedness of
4120 the operand, because the operand is always unsigned
4121 here even if the original C expression is signed. */
4122 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4123 GET_MODE (op1));
4125 /* Fall through. */
4126 case PLUS_EXPR:
4127 return simplify_gen_binary (PLUS, mode, op0, op1);
4129 case MINUS_EXPR:
4130 return simplify_gen_binary (MINUS, mode, op0, op1);
4132 case MULT_EXPR:
4133 return simplify_gen_binary (MULT, mode, op0, op1);
4135 case RDIV_EXPR:
4136 case TRUNC_DIV_EXPR:
4137 case EXACT_DIV_EXPR:
4138 if (unsignedp)
4139 return simplify_gen_binary (UDIV, mode, op0, op1);
4140 else
4141 return simplify_gen_binary (DIV, mode, op0, op1);
4143 case TRUNC_MOD_EXPR:
4144 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4146 case FLOOR_DIV_EXPR:
4147 if (unsignedp)
4148 return simplify_gen_binary (UDIV, mode, op0, op1);
4149 else
4151 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4152 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4153 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4154 return simplify_gen_binary (PLUS, mode, div, adj);
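/* (A worked instance of the adjustment above, as an illustration:
   -7 / 2 truncates to -3 with MOD -1; floor_sdiv_adjust then
   contributes -1, giving the floor result -4.  The CEIL and ROUND
   cases below follow the same div/mod-plus-adjustment pattern.)  */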
4157 case FLOOR_MOD_EXPR:
4158 if (unsignedp)
4159 return simplify_gen_binary (UMOD, mode, op0, op1);
4160 else
4162 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4163 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4164 adj = simplify_gen_unary (NEG, mode,
4165 simplify_gen_binary (MULT, mode, adj, op1),
4166 mode);
4167 return simplify_gen_binary (PLUS, mode, mod, adj);
4170 case CEIL_DIV_EXPR:
4171 if (unsignedp)
4173 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4174 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4175 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4176 return simplify_gen_binary (PLUS, mode, div, adj);
4178 else
4180 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4181 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4182 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4183 return simplify_gen_binary (PLUS, mode, div, adj);
4186 case CEIL_MOD_EXPR:
4187 if (unsignedp)
4189 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4190 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4191 adj = simplify_gen_unary (NEG, mode,
4192 simplify_gen_binary (MULT, mode, adj, op1),
4193 mode);
4194 return simplify_gen_binary (PLUS, mode, mod, adj);
4196 else
4198 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4199 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4200 adj = simplify_gen_unary (NEG, mode,
4201 simplify_gen_binary (MULT, mode, adj, op1),
4202 mode);
4203 return simplify_gen_binary (PLUS, mode, mod, adj);
4206 case ROUND_DIV_EXPR:
4207 if (unsignedp)
4209 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4210 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4211 rtx adj = round_udiv_adjust (mode, mod, op1);
4212 return simplify_gen_binary (PLUS, mode, div, adj);
4214 else
4216 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4217 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4218 rtx adj = round_sdiv_adjust (mode, mod, op1);
4219 return simplify_gen_binary (PLUS, mode, div, adj);
4222 case ROUND_MOD_EXPR:
4223 if (unsignedp)
4225 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4226 rtx adj = round_udiv_adjust (mode, mod, op1);
4227 adj = simplify_gen_unary (NEG, mode,
4228 simplify_gen_binary (MULT, mode, adj, op1),
4229 mode);
4230 return simplify_gen_binary (PLUS, mode, mod, adj);
4232 else
4234 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4235 rtx adj = round_sdiv_adjust (mode, mod, op1);
4236 adj = simplify_gen_unary (NEG, mode,
4237 simplify_gen_binary (MULT, mode, adj, op1),
4238 mode);
4239 return simplify_gen_binary (PLUS, mode, mod, adj);
4242 case LSHIFT_EXPR:
4243 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4245 case RSHIFT_EXPR:
4246 if (unsignedp)
4247 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4248 else
4249 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4251 case LROTATE_EXPR:
4252 return simplify_gen_binary (ROTATE, mode, op0, op1);
4254 case RROTATE_EXPR:
4255 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4257 case MIN_EXPR:
4258 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4260 case MAX_EXPR:
4261 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4263 case BIT_AND_EXPR:
4264 case TRUTH_AND_EXPR:
4265 return simplify_gen_binary (AND, mode, op0, op1);
4267 case BIT_IOR_EXPR:
4268 case TRUTH_OR_EXPR:
4269 return simplify_gen_binary (IOR, mode, op0, op1);
4271 case BIT_XOR_EXPR:
4272 case TRUTH_XOR_EXPR:
4273 return simplify_gen_binary (XOR, mode, op0, op1);
4275 case TRUTH_ANDIF_EXPR:
4276 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4278 case TRUTH_ORIF_EXPR:
4279 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4281 case TRUTH_NOT_EXPR:
4282 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4284 case LT_EXPR:
4285 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4286 op0, op1);
4288 case LE_EXPR:
4289 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4290 op0, op1);
4292 case GT_EXPR:
4293 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4294 op0, op1);
4296 case GE_EXPR:
4297 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4298 op0, op1);
4300 case EQ_EXPR:
4301 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4303 case NE_EXPR:
4304 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4306 case UNORDERED_EXPR:
4307 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4309 case ORDERED_EXPR:
4310 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4312 case UNLT_EXPR:
4313 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4315 case UNLE_EXPR:
4316 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4318 case UNGT_EXPR:
4319 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4321 case UNGE_EXPR:
4322 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4324 case UNEQ_EXPR:
4325 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4327 case LTGT_EXPR:
4328 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4330 case COND_EXPR:
4331 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4333 case COMPLEX_EXPR:
4334 gcc_assert (COMPLEX_MODE_P (mode));
4335 if (GET_MODE (op0) == VOIDmode)
4336 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4337 if (GET_MODE (op1) == VOIDmode)
4338 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4339 return gen_rtx_CONCAT (mode, op0, op1);
4341 case CONJ_EXPR:
4342 if (GET_CODE (op0) == CONCAT)
4343 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4344 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4345 XEXP (op0, 1),
4346 GET_MODE_INNER (mode)));
4347 else
4349 enum machine_mode imode = GET_MODE_INNER (mode);
4350 rtx re, im;
4352 if (MEM_P (op0))
4354 re = adjust_address_nv (op0, imode, 0);
4355 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4357 else
4359 enum machine_mode ifmode = int_mode_for_mode (mode);
4360 enum machine_mode ihmode = int_mode_for_mode (imode);
4361 rtx halfsize;
4362 if (ifmode == BLKmode || ihmode == BLKmode)
4363 return NULL;
4364 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4365 re = op0;
4366 if (mode != ifmode)
4367 re = gen_rtx_SUBREG (ifmode, re, 0);
4368 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4369 if (imode != ihmode)
4370 re = gen_rtx_SUBREG (imode, re, 0);
4371 im = copy_rtx (op0);
4372 if (mode != ifmode)
4373 im = gen_rtx_SUBREG (ifmode, im, 0);
4374 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4375 if (imode != ihmode)
4376 im = gen_rtx_SUBREG (imode, im, 0);
4378 im = gen_rtx_NEG (imode, im);
4379 return gen_rtx_CONCAT (mode, re, im);
4382 case ADDR_EXPR:
4383 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4384 if (!op0 || !MEM_P (op0))
4386 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4387 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4388 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4389 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4390 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4391 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4393 if (handled_component_p (TREE_OPERAND (exp, 0)))
4395 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4396 tree decl
4397 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4398 &bitoffset, &bitsize, &maxsize);
4399 if ((TREE_CODE (decl) == VAR_DECL
4400 || TREE_CODE (decl) == PARM_DECL
4401 || TREE_CODE (decl) == RESULT_DECL)
4402 && (!TREE_ADDRESSABLE (decl)
4403 || target_for_debug_bind (decl))
4404 && (bitoffset % BITS_PER_UNIT) == 0
4405 && bitsize > 0
4406 && bitsize == maxsize)
4408 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4409 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4413 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4414 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4415 == ADDR_EXPR)
4417 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4418 0));
4419 if (op0 != NULL
4420 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4421 || (GET_CODE (op0) == PLUS
4422 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4423 && CONST_INT_P (XEXP (op0, 1)))))
4425 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4426 1));
4427 if (!op1 || !CONST_INT_P (op1))
4428 return NULL;
4430 return plus_constant (mode, op0, INTVAL (op1));
4434 return NULL;
4437 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
4438 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4440 return op0;
4442 case VECTOR_CST:
4444 unsigned i;
4446 op0 = gen_rtx_CONCATN
4447 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4449 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4451 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4452 if (!op1)
4453 return NULL;
4454 XVECEXP (op0, 0, i) = op1;
4457 return op0;
4460 case CONSTRUCTOR:
4461 if (TREE_CLOBBER_P (exp))
4462 return NULL;
4463 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4465 unsigned i;
4466 tree val;
4468 op0 = gen_rtx_CONCATN
4469 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4471 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4473 op1 = expand_debug_expr (val);
4474 if (!op1)
4475 return NULL;
4476 XVECEXP (op0, 0, i) = op1;
4479 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4481 op1 = expand_debug_expr
4482 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4484 if (!op1)
4485 return NULL;
4487 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4488 XVECEXP (op0, 0, i) = op1;
4491 return op0;
4493 else
4494 goto flag_unsupported;
4496 case CALL_EXPR:
4497 /* ??? Maybe handle some builtins? */
4498 return NULL;
4500 case SSA_NAME:
4502 gimple g = get_gimple_for_ssa_name (exp);
4503 if (g)
4505 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
4506 if (!op0)
4507 return NULL;
4509 else
4511 int part = var_to_partition (SA.map, exp);
4513 if (part == NO_PARTITION)
4515 /* If this is a reference to an incoming value of a parameter
4516 that is never used in the code, or whose incoming
4517 value is never used in the code, use the PARM_DECL's
4518 DECL_RTL if set. */
4519 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4520 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4522 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4523 if (op0)
4524 goto adjust_mode;
4525 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4526 if (op0)
4527 goto adjust_mode;
4529 return NULL;
4532 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4534 op0 = copy_rtx (SA.partition_to_pseudo[part]);
4536 goto adjust_mode;
4539 case ERROR_MARK:
4540 return NULL;
4542 /* Vector stuff. For most of these tree codes there is no corresponding rtl code. */
4543 case REALIGN_LOAD_EXPR:
4544 case REDUC_MAX_EXPR:
4545 case REDUC_MIN_EXPR:
4546 case REDUC_PLUS_EXPR:
4547 case VEC_COND_EXPR:
4548 case VEC_LSHIFT_EXPR:
4549 case VEC_PACK_FIX_TRUNC_EXPR:
4550 case VEC_PACK_SAT_EXPR:
4551 case VEC_PACK_TRUNC_EXPR:
4552 case VEC_RSHIFT_EXPR:
4553 case VEC_UNPACK_FLOAT_HI_EXPR:
4554 case VEC_UNPACK_FLOAT_LO_EXPR:
4555 case VEC_UNPACK_HI_EXPR:
4556 case VEC_UNPACK_LO_EXPR:
4557 case VEC_WIDEN_MULT_HI_EXPR:
4558 case VEC_WIDEN_MULT_LO_EXPR:
4559 case VEC_WIDEN_MULT_EVEN_EXPR:
4560 case VEC_WIDEN_MULT_ODD_EXPR:
4561 case VEC_WIDEN_LSHIFT_HI_EXPR:
4562 case VEC_WIDEN_LSHIFT_LO_EXPR:
4563 case VEC_PERM_EXPR:
4564 return NULL;
4566 /* Misc codes. */
4567 case ADDR_SPACE_CONVERT_EXPR:
4568 case FIXED_CONVERT_EXPR:
4569 case OBJ_TYPE_REF:
4570 case WITH_SIZE_EXPR:
4571 return NULL;
4573 case DOT_PROD_EXPR:
4574 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4575 && SCALAR_INT_MODE_P (mode))
4578 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4579 0)))
4580 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4581 inner_mode);
4583 op1 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4584 1)))
4585 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4586 inner_mode);
4587 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4588 return simplify_gen_binary (PLUS, mode, op0, op2);
4590 return NULL;
4592 case WIDEN_MULT_EXPR:
4593 case WIDEN_MULT_PLUS_EXPR:
4594 case WIDEN_MULT_MINUS_EXPR:
4595 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4596 && SCALAR_INT_MODE_P (mode))
4598 inner_mode = GET_MODE (op0);
4599 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4600 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4601 else
4602 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4603 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4604 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4605 else
4606 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4607 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4608 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4609 return op0;
4610 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4611 return simplify_gen_binary (PLUS, mode, op0, op2);
4612 else
4613 return simplify_gen_binary (MINUS, mode, op2, op0);
4615 return NULL;
4617 case MULT_HIGHPART_EXPR:
4618 /* ??? Similar to the above. */
4619 return NULL;
4621 case WIDEN_SUM_EXPR:
4622 case WIDEN_LSHIFT_EXPR:
4623 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4624 && SCALAR_INT_MODE_P (mode))
4627 op0 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4628 0)))
4629 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4630 inner_mode);
4631 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4632 ? ASHIFT : PLUS, mode, op0, op1);
4634 return NULL;
4636 case FMA_EXPR:
4637 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4639 default:
4640 flag_unsupported:
4641 #ifdef ENABLE_CHECKING
4642 debug_tree (exp);
4643 gcc_unreachable ();
4644 #else
4645 return NULL;
4646 #endif
4650 /* Return an RTX equivalent to the source bind value of the tree expression
4651 EXP. */
4653 static rtx
4654 expand_debug_source_expr (tree exp)
4656 rtx op0 = NULL_RTX;
4657 enum machine_mode mode = VOIDmode, inner_mode;
4659 switch (TREE_CODE (exp))
4661 case PARM_DECL:
4663 mode = DECL_MODE (exp);
4664 op0 = expand_debug_parm_decl (exp);
4665 if (op0)
4666 break;
4667 /* Check whether this is an argument that has been completely
4668 optimized out. */
4669 if (!DECL_RTL_SET_P (exp)
4670 && !DECL_INCOMING_RTL (exp)
4671 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4673 tree aexp = DECL_ORIGIN (exp);
4674 if (DECL_CONTEXT (aexp)
4675 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4677 vec<tree, va_gc> **debug_args;
4678 unsigned int ix;
4679 tree ddecl;
4680 debug_args = decl_debug_args_lookup (current_function_decl);
4681 if (debug_args != NULL)
4683 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
4684 ix += 2)
4685 if (ddecl == aexp)
4686 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4690 break;
4692 default:
4693 break;
4696 if (op0 == NULL_RTX)
4697 return NULL_RTX;
4699 inner_mode = GET_MODE (op0);
4700 if (mode == inner_mode)
4701 return op0;
4703 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4705 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4706 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4707 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4708 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4709 else
4710 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4712 else if (FLOAT_MODE_P (mode))
4713 gcc_unreachable ();
4714 else if (FLOAT_MODE_P (inner_mode))
4716 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4717 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4718 else
4719 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4721 else if (CONSTANT_P (op0)
4722 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4723 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4724 subreg_lowpart_offset (mode, inner_mode));
4725 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4726 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4727 else
4728 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4730 return op0;
4733 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
4734 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4735 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
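/* E.g. (a sketch): if INSN's location is an arithmetic expression
   nested more than four operator levels deep, the subexpression
   reached at depth four is split out into a DEBUG_EXPR, and a debug
   bind insn giving that DEBUG_EXPR its value is emitted just before
   INSN.  */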
4737 static void
4738 avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
4740 rtx exp = *exp_p;
4742 if (exp == NULL_RTX)
4743 return;
4745 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4746 return;
4748 if (depth == 4)
4750 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4751 rtx dval = make_debug_expr_from_rtl (exp);
4753 /* Emit a debug bind insn before INSN. */
4754 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4755 DEBUG_EXPR_TREE_DECL (dval), exp,
4756 VAR_INIT_STATUS_INITIALIZED);
4758 emit_debug_insn_before (bind, insn);
4759 *exp_p = dval;
4760 return;
4763 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
4764 int i, j;
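/* Walk EXP's operands according to its RTX format string -- e.g.
   GET_RTX_FORMAT for a PLUS is "ee", so both operand expressions
   are recursed into, while vector operands ('E'/'V') are walked
   element-wise.  */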
4765 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
4766 switch (*format_ptr++)
4768 case 'e':
4769 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
4770 break;
4772 case 'E':
4773 case 'V':
4774 for (j = 0; j < XVECLEN (exp, i); j++)
4775 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
4776 break;
4778 default:
4779 break;
4783 /* Expand the _LOCs in debug insns. We run this after expanding all
4784 regular insns, so that any variables referenced in the function
4785 will have their DECL_RTLs set. */
4787 static void
4788 expand_debug_locations (void)
4790 rtx insn;
4791 rtx last = get_last_insn ();
4792 int save_strict_alias = flag_strict_aliasing;
4794 /* New alias sets while setting up memory attributes cause
4795 -fcompare-debug failures, even though they don't bring about any
4796 codegen changes. */
4797 flag_strict_aliasing = 0;
4799 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4800 if (DEBUG_INSN_P (insn))
4802 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
4803 rtx val, prev_insn, insn2;
4804 enum machine_mode mode;
4806 if (value == NULL_TREE)
4807 val = NULL_RTX;
4808 else
4810 if (INSN_VAR_LOCATION_STATUS (insn)
4811 == VAR_INIT_STATUS_UNINITIALIZED)
4812 val = expand_debug_source_expr (value);
4813 else
4814 val = expand_debug_expr (value);
4815 gcc_assert (last == get_last_insn ());
4818 if (!val)
4819 val = gen_rtx_UNKNOWN_VAR_LOC ();
4820 else
4822 mode = GET_MODE (INSN_VAR_LOCATION (insn));
4824 gcc_assert (mode == GET_MODE (val)
4825 || (GET_MODE (val) == VOIDmode
4826 && (CONST_SCALAR_INT_P (val)
4827 || GET_CODE (val) == CONST_FIXED
4828 || GET_CODE (val) == LABEL_REF)));
4831 INSN_VAR_LOCATION_LOC (insn) = val;
4832 prev_insn = PREV_INSN (insn);
4833 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
4834 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
4837 flag_strict_aliasing = save_strict_alias;
4840 /* Expand basic block BB from GIMPLE trees to RTL. */
4842 static basic_block
4843 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
4845 gimple_stmt_iterator gsi;
4846 gimple_seq stmts;
4847 gimple stmt = NULL;
4848 rtx note, last;
4849 edge e;
4850 edge_iterator ei;
4851 void **elt;
4853 if (dump_file)
4854 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
4855 bb->index);
4857 /* Note that since we are now transitioning from GIMPLE to RTL, we
4858 cannot use the gsi_*_bb() routines because they expect the basic
4859 block to be in GIMPLE, instead of RTL. Therefore, we need to
4860 access the BB sequence directly. */
4861 stmts = bb_seq (bb);
4862 bb->il.gimple.seq = NULL;
4863 bb->il.gimple.phi_nodes = NULL;
4864 rtl_profile_for_bb (bb);
4865 init_rtl_bb_info (bb);
4866 bb->flags |= BB_RTL;
4868 /* Remove the RETURN_EXPR if we may fall through to the exit
4869 instead. */
4870 gsi = gsi_last (stmts);
4871 if (!gsi_end_p (gsi)
4872 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
4874 gimple ret_stmt = gsi_stmt (gsi);
4876 gcc_assert (single_succ_p (bb));
4877 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
4879 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
4880 && !gimple_return_retval (ret_stmt))
4882 gsi_remove (&gsi, false);
4883 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
4887 gsi = gsi_start (stmts);
4888 if (!gsi_end_p (gsi))
4890 stmt = gsi_stmt (gsi);
4891 if (gimple_code (stmt) != GIMPLE_LABEL)
4892 stmt = NULL;
4895 elt = pointer_map_contains (lab_rtx_for_bb, bb);
4897 if (stmt || elt)
4899 last = get_last_insn ();
4901 if (stmt)
4903 expand_gimple_stmt (stmt);
4904 gsi_next (&gsi);
4907 if (elt)
4908 emit_label ((rtx) *elt);
4910 /* Java emits line number notes at the top of labels.
4911 ??? Make this go away once line number notes are obsoleted. */
4912 BB_HEAD (bb) = NEXT_INSN (last);
4913 if (NOTE_P (BB_HEAD (bb)))
4914 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
4915 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
4917 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4919 else
4920 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
4922 NOTE_BASIC_BLOCK (note) = bb;
4924 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4926 basic_block new_bb;
4928 stmt = gsi_stmt (gsi);
4930 /* If this statement is a non-debug one, and we generate debug
4931 insns, then this one might be the last real use of a TERed
4932 SSA_NAME, but where there are still some debug uses further
4933 down. Expanding the current SSA name in such further debug
4934 uses by their RHS might lead to wrong debug info, as coalescing
4935 might make the operands of such RHS be placed into the same
4936 pseudo as something else. Like so:
4937 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
4938 use(a_1);
4939 a_2 = ...
4940 #DEBUG ... => a_1
4941 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
4942 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
4943 the write to a_2 would actually have clobbered the place which
4944 formerly held a_0.
4946 So, instead of that, we recognize the situation, and generate
4947 debug temporaries at the last real use of TERed SSA names:
4948 a_1 = a_0 + 1;
4949 #DEBUG #D1 => a_1
4950 use(a_1);
4951 a_2 = ...
4952 #DEBUG ... => #D1
4954 if (MAY_HAVE_DEBUG_INSNS
4955 && SA.values
4956 && !is_gimple_debug (stmt))
4958 ssa_op_iter iter;
4959 tree op;
4960 gimple def;
4962 location_t sloc = curr_insn_location ();
4964 /* Look for SSA names that have their last use here (TERed
4965 names always have only one real use). */
4966 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4967 if ((def = get_gimple_for_ssa_name (op)))
4969 imm_use_iterator imm_iter;
4970 use_operand_p use_p;
4971 bool have_debug_uses = false;
4973 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
4975 if (gimple_debug_bind_p (USE_STMT (use_p)))
4977 have_debug_uses = true;
4978 break;
4982 if (have_debug_uses)
4984 /* OP is a TERed SSA name, with DEF its defining
4985 statement, and where OP is used in further debug
4986 instructions. Generate a debug temporary, and
4987 replace all uses of OP in debug insns with that
4988 temporary. */
4989 gimple debugstmt;
4990 tree value = gimple_assign_rhs_to_tree (def);
4991 tree vexpr = make_node (DEBUG_EXPR_DECL);
4992 rtx val;
4993 enum machine_mode mode;
4995 set_curr_insn_location (gimple_location (def));
4997 DECL_ARTIFICIAL (vexpr) = 1;
4998 TREE_TYPE (vexpr) = TREE_TYPE (value);
4999 if (DECL_P (value))
5000 mode = DECL_MODE (value);
5001 else
5002 mode = TYPE_MODE (TREE_TYPE (value));
5003 DECL_MODE (vexpr) = mode;
5005 val = gen_rtx_VAR_LOCATION
5006 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5008 emit_debug_insn (val);
5010 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5012 if (!gimple_debug_bind_p (debugstmt))
5013 continue;
5015 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5016 SET_USE (use_p, vexpr);
5018 update_stmt (debugstmt);
5022 set_curr_insn_location (sloc);
5025 currently_expanding_gimple_stmt = stmt;
5027 /* Expand this statement, then evaluate the resulting RTL and
5028 fixup the CFG accordingly. */
5029 if (gimple_code (stmt) == GIMPLE_COND)
5031 new_bb = expand_gimple_cond (bb, stmt);
5032 if (new_bb)
5033 return new_bb;
5035 else if (gimple_debug_bind_p (stmt))
5037 location_t sloc = curr_insn_location ();
5038 gimple_stmt_iterator nsi = gsi;
5040 for (;;)
5042 tree var = gimple_debug_bind_get_var (stmt);
5043 tree value;
5044 rtx val;
5045 enum machine_mode mode;
5047 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5048 && TREE_CODE (var) != LABEL_DECL
5049 && !target_for_debug_bind (var))
5050 goto delink_debug_stmt;
5052 if (gimple_debug_bind_has_value_p (stmt))
5053 value = gimple_debug_bind_get_value (stmt);
5054 else
5055 value = NULL_TREE;
5057 last = get_last_insn ();
5059 set_curr_insn_location (gimple_location (stmt));
5061 if (DECL_P (var))
5062 mode = DECL_MODE (var);
5063 else
5064 mode = TYPE_MODE (TREE_TYPE (var));
5066 val = gen_rtx_VAR_LOCATION
5067 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5069 emit_debug_insn (val);
5071 if (dump_file && (dump_flags & TDF_DETAILS))
5073 /* We can't dump the insn with a TREE where an RTX
5074 is expected. */
5075 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5076 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5077 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5080 delink_debug_stmt:
5081 /* In order not to generate too many debug temporaries,
5082 we delink all uses of debug statements we already expanded.
5083 Therefore debug statements between definition and real
5084 use of TERed SSA names will continue to use the SSA name,
5085 and not be replaced with debug temps. */
5086 delink_stmt_imm_use (stmt);
5088 gsi = nsi;
5089 gsi_next (&nsi);
5090 if (gsi_end_p (nsi))
5091 break;
5092 stmt = gsi_stmt (nsi);
5093 if (!gimple_debug_bind_p (stmt))
5094 break;
5097 set_curr_insn_location (sloc);
5099 else if (gimple_debug_source_bind_p (stmt))
5101 location_t sloc = curr_insn_location ();
5102 tree var = gimple_debug_source_bind_get_var (stmt);
5103 tree value = gimple_debug_source_bind_get_value (stmt);
5104 rtx val;
5105 enum machine_mode mode;
5107 last = get_last_insn ();
5109 set_curr_insn_location (gimple_location (stmt));
5111 mode = DECL_MODE (var);
5113 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5114 VAR_INIT_STATUS_UNINITIALIZED);
5116 emit_debug_insn (val);
5118 if (dump_file && (dump_flags & TDF_DETAILS))
5120 /* We can't dump the insn with a TREE where an RTX
5121 is expected. */
5122 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5123 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5124 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5127 set_curr_insn_location (sloc);
5129 else
5131 if (is_gimple_call (stmt)
5132 && gimple_call_tail_p (stmt)
5133 && disable_tail_calls)
5134 gimple_call_set_tail (stmt, false);
5136 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
5138 bool can_fallthru;
5139 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
5140 if (new_bb)
5142 if (can_fallthru)
5143 bb = new_bb;
5144 else
5145 return new_bb;
5148 else
5150 def_operand_p def_p;
5151 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5153 if (def_p != NULL)
5155 /* Ignore this stmt if it is in the list of
5156 replaceable expressions. */
5157 if (SA.values
5158 && bitmap_bit_p (SA.values,
5159 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5160 continue;
5162 last = expand_gimple_stmt (stmt);
5163 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5168 currently_expanding_gimple_stmt = NULL;
5170 /* Expand implicit goto and convert goto_locus. */
5171 FOR_EACH_EDGE (e, ei, bb->succs)
5173 if (e->goto_locus != UNKNOWN_LOCATION)
5174 set_curr_insn_location (e->goto_locus);
5175 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5177 emit_jump (label_rtx_for_bb (e->dest));
5178 e->flags &= ~EDGE_FALLTHRU;
5182 /* Expanded RTL can create a jump in the last instruction of a block.
5183 This might later be assumed to be a jump to the successor and break edge insertion.
5184 We need to insert a dummy move to prevent this. PR41440. */
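/* (Concretely, the fix below emits a no-op self-move -- e.g.
   (set (reg:SI N) (reg:SI N)) -- after the trailing jump, so the
   block no longer ends in a jump insn.)  */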
5185 if (single_succ_p (bb)
5186 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5187 && (last = get_last_insn ())
5188 && JUMP_P (last))
5190 rtx dummy = gen_reg_rtx (SImode);
5191 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5194 do_pending_stack_adjust ();
5196 /* Find the block tail. The last insn in the block is the insn
5197 before a barrier and/or table jump insn. */
5198 last = get_last_insn ();
5199 if (BARRIER_P (last))
5200 last = PREV_INSN (last);
5201 if (JUMP_TABLE_DATA_P (last))
5202 last = PREV_INSN (PREV_INSN (last));
5203 BB_END (bb) = last;
5205 update_bb_for_insn (bb);
5207 return bb;
5211 /* Create a basic block for initialization code. */
5213 static basic_block
5214 construct_init_block (void)
5216 basic_block init_block, first_block;
5217 edge e = NULL;
5218 int flags;
5220 /* Multiple entry points not supported yet. */
5221 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5222 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5223 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5224 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5225 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5227 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5229 /* When the entry edge points to the first basic block, we don't need a jump;
5230 otherwise we have to jump to the proper target. */
5231 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5233 tree label = gimple_block_label (e->dest);
5235 emit_jump (label_rtx (label));
5236 flags = 0;
5238 else
5239 flags = EDGE_FALLTHRU;
5241 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5242 get_last_insn (),
5243 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5244 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5245 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5246 if (current_loops && ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father)
5247 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5248 if (e)
5250 first_block = e->dest;
5251 redirect_edge_succ (e, init_block);
5252 e = make_edge (init_block, first_block, flags);
5254 else
5255 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5256 e->probability = REG_BR_PROB_BASE;
5257 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5259 update_bb_for_insn (init_block);
5260 return init_block;
5263 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5264 found in the block tree. */
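/* For example (an illustrative nest of scopes):
     { int a;  { int b; }  { int c; } }
   both inner blocks receive a BLOCK_NUMBER one level deeper than
   the outer block's.  */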
5266 static void
5267 set_block_levels (tree block, int level)
5269 while (block)
5271 BLOCK_NUMBER (block) = level;
5272 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5273 block = BLOCK_CHAIN (block);
5277 /* Create a block containing landing pads and similar stuff. */
5279 static void
5280 construct_exit_block (void)
5282 rtx head = get_last_insn ();
5283 rtx end;
5284 basic_block exit_block;
5285 edge e, e2;
5286 unsigned ix;
5287 edge_iterator ei;
5288 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5289 rtx orig_end = BB_END (prev_bb);
5291 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5293 /* Make sure the locus is set to the end of the function, so that
5294 epilogue line numbers and warnings are set properly. */
5295 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5296 input_location = cfun->function_end_locus;
5298 /* Generate rtl for function exit. */
5299 expand_function_end ();
5301 end = get_last_insn ();
5302 if (head == end)
5303 return;
5304 /* While emitting the function end we could move the end of the last basic
5305 block. */
5306 BB_END (prev_bb) = orig_end;
5307 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5308 head = NEXT_INSN (head);
5309 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5310 bb frequency counting will be confused. Any instructions before that
5311 label are emitted for the case where PREV_BB falls through into the
5312 exit block, so append those instructions to prev_bb in that case. */
5313 if (NEXT_INSN (head) != return_label)
5315 while (NEXT_INSN (head) != return_label)
5317 if (!NOTE_P (NEXT_INSN (head)))
5318 BB_END (prev_bb) = NEXT_INSN (head);
5319 head = NEXT_INSN (head);
5322 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5323 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5324 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5325 if (current_loops && EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father)
5326 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5328 ix = 0;
5329 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5331 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5332 if (!(e->flags & EDGE_ABNORMAL))
5333 redirect_edge_succ (e, exit_block);
5334 else
5335 ix++;
5338 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5339 e->probability = REG_BR_PROB_BASE;
5340 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5341 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5342 if (e2 != e)
5344 e->count -= e2->count;
5345 exit_block->count -= e2->count;
5346 exit_block->frequency -= EDGE_FREQUENCY (e2);
5348 if (e->count < 0)
5349 e->count = 0;
5350 if (exit_block->count < 0)
5351 exit_block->count = 0;
5352 if (exit_block->frequency < 0)
5353 exit_block->frequency = 0;
5354 update_bb_for_insn (exit_block);
5357 /* Helper function for discover_nonconstant_array_refs.
5358 Look for ARRAY_REF nodes with non-constant indexes and mark them
5359 addressable. */
5361 static tree
5362 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5363 void *data ATTRIBUTE_UNUSED)
5365 tree t = *tp;
5367 if (IS_TYPE_OR_DECL_P (t))
5368 *walk_subtrees = 0;
5369 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5371 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5372 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5373 && (!TREE_OPERAND (t, 2)
5374 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5375 || (TREE_CODE (t) == COMPONENT_REF
5376 && (!TREE_OPERAND (t,2)
5377 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5378 || TREE_CODE (t) == BIT_FIELD_REF
5379 || TREE_CODE (t) == REALPART_EXPR
5380 || TREE_CODE (t) == IMAGPART_EXPR
5381 || TREE_CODE (t) == VIEW_CONVERT_EXPR
5382 || CONVERT_EXPR_P (t))
5383 t = TREE_OPERAND (t, 0);
5385 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5387 t = get_base_address (t);
5388 if (t && DECL_P (t)
5389 && DECL_MODE (t) != BLKmode)
5390 TREE_ADDRESSABLE (t) = 1;
5393 *walk_subtrees = 0;
5396 return NULL_TREE;
5399 /* RTL expansion is not able to compile array references with variable
5400 offsets for arrays stored in a single register. Discover such
5401 expressions and mark variables as addressable to avoid this
5402 scenario. */
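/* A minimal illustration (hypothetical source, not from this file):

     int v[2];     -- small enough to live in a register
     ... v[i] ...  -- variable index needs an in-memory object

   Marking V with TREE_ADDRESSABLE forces it onto the stack so the
   variably-indexed access can be expanded.  */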
5404 static void
5405 discover_nonconstant_array_refs (void)
5407 basic_block bb;
5408 gimple_stmt_iterator gsi;
5410 FOR_EACH_BB_FN (bb, cfun)
5411 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5413 gimple stmt = gsi_stmt (gsi);
5414 if (!is_gimple_debug (stmt))
5415 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
5419 /* This function sets crtl->args.internal_arg_pointer to a virtual
5420 register if DRAP is needed. The local register allocator will replace
5421 virtual_incoming_args_rtx with the virtual register. */
5423 static void
5424 expand_stack_alignment (void)
5426 rtx drap_rtx;
5427 unsigned int preferred_stack_boundary;
5429 if (! SUPPORTS_STACK_ALIGNMENT)
5430 return;
5432 if (cfun->calls_alloca
5433 || cfun->has_nonlocal_label
5434 || crtl->has_nonlocal_goto)
5435 crtl->need_drap = true;
5437 /* Call update_stack_boundary here again to update incoming stack
5438 boundary. It may set incoming stack alignment to a different
5439 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5440 use the minimum incoming stack alignment to check if it is OK
5441 to perform sibcall optimization since sibcall optimization will
5442 only align the outgoing stack to incoming stack boundary. */
5443 if (targetm.calls.update_stack_boundary)
5444 targetm.calls.update_stack_boundary ();
5446 /* The incoming stack frame has to be aligned at least at
5447 parm_stack_boundary. */
5448 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5450 /* Update crtl->stack_alignment_estimated and use it later to align
5451 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5452 exceptions since callgraph doesn't collect incoming stack alignment
5453 in this case. */
5454 if (cfun->can_throw_non_call_exceptions
5455 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5456 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5457 else
5458 preferred_stack_boundary = crtl->preferred_stack_boundary;
5459 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5460 crtl->stack_alignment_estimated = preferred_stack_boundary;
5461 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5462 crtl->stack_alignment_needed = preferred_stack_boundary;
5464 gcc_assert (crtl->stack_alignment_needed
5465 <= crtl->stack_alignment_estimated);
5467 crtl->stack_realign_needed
5468 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5469 crtl->stack_realign_tried = crtl->stack_realign_needed;
5471 crtl->stack_realign_processed = true;
5473 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5474 alignment. */
5475 gcc_assert (targetm.calls.get_drap_rtx != NULL);
5476 drap_rtx = targetm.calls.get_drap_rtx ();
5478 /* stack_realign_drap and drap_rtx must match. */
5479 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5481 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5482 if (NULL != drap_rtx)
5484 crtl->args.internal_arg_pointer = drap_rtx;
5486 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5487 needed. */
5488 fixup_tail_calls ();
5493 static void
5494 expand_main_function (void)
5496 #if (defined(INVOKE__main) \
5497 || (!defined(HAS_INIT_SECTION) \
5498 && !defined(INIT_SECTION_ASM_OP) \
5499 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5500 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5501 #endif
5505 /* Expand code to initialize the stack_protect_guard. This is invoked at
5506 the beginning of a function to be protected. */
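/* (In the typical setup -- an assumption about the target, not
   something this function requires -- the guard read below comes
   from the global __stack_chk_guard or a TLS slot, and X ends up
   being the canary slot in this function's stack frame.)  */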
5508 #ifndef HAVE_stack_protect_set
5509 # define HAVE_stack_protect_set 0
5510 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5511 #endif
5513 static void
5514 stack_protect_prologue (void)
5516 tree guard_decl = targetm.stack_protect_guard ();
5517 rtx x, y;
5519 x = expand_normal (crtl->stack_protect_guard);
5520 y = expand_normal (guard_decl);
5522 /* Allow the target to copy from Y to X without leaking Y into a
5523 register. */
5524 if (HAVE_stack_protect_set)
5526 rtx insn = gen_stack_protect_set (x, y);
5527 if (insn)
5529 emit_insn (insn);
5530 return;
5534 /* Otherwise do a straight move. */
5535 emit_move_insn (x, y);
5538 /* Translate the intermediate representation contained in the CFG
5539 from GIMPLE trees to RTL.
5541 We do conversion per basic block and preserve/update the tree CFG.
5542 This implies we have to do some magic as the CFG can simultaneously
5543 consist of basic blocks containing RTL and GIMPLE trees. This can
5544 confuse the CFG hooks, so be careful to not manipulate CFG during
5545 the expansion. */
5547 static unsigned int
5548 gimple_expand_cfg (void)
5550 basic_block bb, init_block;
5551 sbitmap blocks;
5552 edge_iterator ei;
5553 edge e;
5554 rtx var_seq, var_ret_seq;
5555 unsigned i;
5557 timevar_push (TV_OUT_OF_SSA);
5558 rewrite_out_of_ssa (&SA);
5559 timevar_pop (TV_OUT_OF_SSA);
5560 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
5562 /* Make sure all values used by the optimization passes have sane
5563 defaults. */
5564 reg_renumber = 0;
5566 /* Some backends want to know that we are expanding to RTL. */
5567 currently_expanding_to_rtl = 1;
5568 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5569 free_dominance_info (CDI_DOMINATORS);
5571 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5573 insn_locations_init ();
5574 if (!DECL_IS_BUILTIN (current_function_decl))
5576 /* Eventually, all FEs should explicitly set function_start_locus. */
5577 if (LOCATION_LOCUS (cfun->function_start_locus) == UNKNOWN_LOCATION)
5578 set_curr_insn_location
5579 (DECL_SOURCE_LOCATION (current_function_decl));
5580 else
5581 set_curr_insn_location (cfun->function_start_locus);
5583 else
5584 set_curr_insn_location (UNKNOWN_LOCATION);
5585 prologue_location = curr_insn_location ();
5587 #ifdef INSN_SCHEDULING
5588 init_sched_attrs ();
5589 #endif
5591 /* Make sure the first insn is a note, even if we don't want line numbers.
5592 This guarantees the first insn will never be deleted.
5593 Also, final expects a note to appear there. */
5594 emit_note (NOTE_INSN_DELETED);
5596 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5597 discover_nonconstant_array_refs ();
5599 targetm.expand_to_rtl_hook ();
5600 crtl->stack_alignment_needed = STACK_BOUNDARY;
5601 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
5602 crtl->stack_alignment_estimated = 0;
5603 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5604 cfun->cfg->max_jumptable_ents = 0;
5606 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
5607 of the function section at expansion time to predict the distance of calls. */
5608 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5610 /* Expand the variables recorded during gimple lowering. */
5611 timevar_push (TV_VAR_EXPAND);
5612 start_sequence ();
5614 var_ret_seq = expand_used_vars ();
5616 var_seq = get_insns ();
5617 end_sequence ();
5618 timevar_pop (TV_VAR_EXPAND);
5620 /* Honor stack protection warnings. */
5621 if (warn_stack_protect)
5623 if (cfun->calls_alloca)
5624 warning (OPT_Wstack_protector,
5625 "stack protector not protecting local variables: "
5626 "variable length buffer");
5627 if (has_short_buffer && !crtl->stack_protect_guard)
5628 warning (OPT_Wstack_protector,
5629 "stack protector not protecting function: "
5630 "all local arrays are less than %d bytes long",
5631 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
5634 /* Set up parameters and prepare for return, for the function. */
5635 expand_function_start (current_function_decl);
5637 /* If we emitted any instructions for setting up the variables,
5638 emit them before the FUNCTION_START note. */
5639 if (var_seq)
5641 emit_insn_before (var_seq, parm_birth_insn);
5643 /* In expand_function_end we'll insert the alloca save/restore
5644 before parm_birth_insn. We've just inserted an alloca call.
5645 Adjust the pointer to match. */
5646 parm_birth_insn = var_seq;
5649 /* Now that we also have the parameter RTXs, copy them over to our
5650 partitions. */
5651 for (i = 0; i < SA.map->num_partitions; i++)
5653 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
5655 if (TREE_CODE (var) != VAR_DECL
5656 && !SA.partition_to_pseudo[i])
5657 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
5658 gcc_assert (SA.partition_to_pseudo[i]);
5660 /* If this decl was marked as living in multiple places, reset
5661 this now to NULL. */
5662 if (DECL_RTL_IF_SET (var) == pc_rtx)
5663 SET_DECL_RTL (var, NULL);
5665 /* Some RTL parts really want to look at DECL_RTL(x) when x
5666 was a decl marked in REG_ATTR or MEM_ATTR. We could use
5667 SET_DECL_RTL here making this available, but that would mean
5668 selecting one of the potentially many RTLs for one DECL. Instead
5669 of doing that we simply reset the MEM_EXPR of the RTL in question;
5670 then nobody can get at it and hence nobody can call DECL_RTL on it. */
5671 if (!DECL_RTL_SET_P (var))
5673 if (MEM_P (SA.partition_to_pseudo[i]))
5674 set_mem_expr (SA.partition_to_pseudo[i], NULL);
5678 /* If we have a class containing differently aligned pointers
5679 we need to merge those into the corresponding RTL pointer
5680 alignment. */
5681 for (i = 1; i < num_ssa_names; i++)
5683 tree name = ssa_name (i);
5684 int part;
5685 rtx r;
5687 if (!name
5688 /* We might have generated new SSA names in
5689 update_alias_info_with_stack_vars. They will have a NULL
5690 defining statement and won't be part of the partitioning,
5691 so ignore those. */
5692 || !SSA_NAME_DEF_STMT (name))
5693 continue;
5694 part = var_to_partition (SA.map, name);
5695 if (part == NO_PARTITION)
5696 continue;
5698 /* Adjust all partition members to get the underlying decl of
5699 the representative which we might have created in expand_one_var. */
5700 if (SSA_NAME_VAR (name) == NULL_TREE)
5702 tree leader = partition_to_var (SA.map, part);
5703 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
5704 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
5706 if (!POINTER_TYPE_P (TREE_TYPE (name)))
5707 continue;
5709 r = SA.partition_to_pseudo[part];
5710 if (REG_P (r))
5711 mark_reg_pointer (r, get_pointer_alignment (name));
5714 /* If this function is `main', emit a call to `__main'
5715 to run global initializers, etc. */
5716 if (DECL_NAME (current_function_decl)
5717 && MAIN_NAME_P (DECL_NAME (current_function_decl))
5718 && DECL_FILE_SCOPE_P (current_function_decl))
5719 expand_main_function ();
5721 /* Initialize the stack_protect_guard field. This must happen after the
5722 call to __main (if any) so that the external decl is initialized. */
5723 if (crtl->stack_protect_guard)
5724 stack_protect_prologue ();
5726 expand_phi_nodes (&SA);
5728 /* Register rtl specific functions for cfg. */
5729 rtl_register_cfg_hooks ();
5731 init_block = construct_init_block ();
5733 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
5734 remaining edges later. */
5735 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
5736 e->flags &= ~EDGE_EXECUTABLE;
5738 lab_rtx_for_bb = pointer_map_create ();
5739 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (cfun),
5740 next_bb)
5741 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
5743 if (MAY_HAVE_DEBUG_INSNS)
5744 expand_debug_locations ();
5746 /* Free stuff we no longer need after GIMPLE optimizations. */
5747 free_dominance_info (CDI_DOMINATORS);
5748 free_dominance_info (CDI_POST_DOMINATORS);
5749 delete_tree_cfg_annotations ();
5751 timevar_push (TV_OUT_OF_SSA);
5752 finish_out_of_ssa (&SA);
5753 timevar_pop (TV_OUT_OF_SSA);
5755 timevar_push (TV_POST_EXPAND);
5756 /* We are no longer in SSA form. */
5757 cfun->gimple_df->in_ssa_p = false;
5758 if (current_loops)
5759 loops_state_clear (LOOP_CLOSED_SSA);
5761 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
5762 conservatively to true until they are all profile-aware. */
5763 pointer_map_destroy (lab_rtx_for_bb);
5764 free_histograms ();
5766 construct_exit_block ();
5767 insn_locations_finalize ();
5769 if (var_ret_seq)
5771 rtx after = return_label;
5772 rtx next = NEXT_INSN (after);
5773 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
5774 after = next;
5775 emit_insn_after (var_ret_seq, after);
5778 /* Zap the tree EH table. */
5779 set_eh_throw_stmt_table (cfun, NULL);
5781 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
5782 to split edges, which edge insertions might do. */
5783 rebuild_jump_labels (get_insns ());
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->insns.r)
	    {
	      rebuild_jump_labels_chain (e->insns.r);
	      /* Put insns after parm birth, but before
		 NOTE_INSN_FUNCTION_BEG.  */
	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
		{
		  rtx insns = e->insns.r;
		  e->insns.r = NULL_RTX;
		  if (NOTE_P (parm_birth_insn)
		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
		  else
		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
		}
	      else
		commit_one_edge_insertion (e);
	    }
	  else
	    ei_next (&ei);
	}
    }
  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the
	     backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }
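
  /* Scan every block and split out any sub-basic-blocks that RTL
     expansion introduced; this also rediscovers the abnormal edges
     removed just above.  */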
  blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
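
  /* Remove any edges that the emitted insn stream no longer
     justifies.  */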
  purge_all_dead_edges ();
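
  /* Settle the final stack alignment requirements (and, on targets that
     need it, the dynamic realignment machinery) now that all insns have
     been emitted.  */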
  expand_stack_alignment ();
  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (cfun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ??? We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;
  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }
  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }
  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* After expanding, the tm_restart map is no longer needed.  */
  if (cfun->gimple_df->tm_restart)
    {
      htab_delete (cfun->gimple_df->tm_restart);
      cfun->gimple_df->tm_restart = NULL;
    }
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();

  timevar_pop (TV_POST_EXPAND);

  return 0;
}
namespace {

const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx
    | PROP_gimple_lvec ), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  ( TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts ), /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return gimple_expand_cfg (); }

}; // class pass_expand

} // anon namespace
rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}