gcc/
[official-gcc.git] / gcc / cfgexpand.c
bloba2b9977ebb1f103e6b60acb4a21331332da2c1c1
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "hard-reg-set.h"
26 #include "alias.h"
27 #include "symtab.h"
28 #include "tree.h"
29 #include "fold-const.h"
30 #include "stringpool.h"
31 #include "varasm.h"
32 #include "stor-layout.h"
33 #include "stmt.h"
34 #include "print-tree.h"
35 #include "tm_p.h"
36 #include "predict.h"
37 #include "function.h"
38 #include "dominance.h"
39 #include "cfg.h"
40 #include "cfgrtl.h"
41 #include "cfganal.h"
42 #include "cfgbuild.h"
43 #include "cfgcleanup.h"
44 #include "basic-block.h"
45 #include "insn-codes.h"
46 #include "optabs.h"
47 #include "flags.h"
48 #include "insn-config.h"
49 #include "expmed.h"
50 #include "dojump.h"
51 #include "explow.h"
52 #include "calls.h"
53 #include "emit-rtl.h"
54 #include "expr.h"
55 #include "langhooks.h"
56 #include "bitmap.h"
57 #include "tree-ssa-alias.h"
58 #include "internal-fn.h"
59 #include "tree-eh.h"
60 #include "gimple-expr.h"
61 #include "gimple.h"
62 #include "gimple-iterator.h"
63 #include "gimple-walk.h"
64 #include "gimple-ssa.h"
65 #include "plugin-api.h"
66 #include "ipa-ref.h"
67 #include "cgraph.h"
68 #include "tree-cfg.h"
69 #include "tree-phinodes.h"
70 #include "ssa-iterators.h"
71 #include "tree-ssanames.h"
72 #include "tree-dfa.h"
73 #include "tree-ssa.h"
74 #include "tree-pass.h"
75 #include "except.h"
76 #include "diagnostic.h"
77 #include "gimple-pretty-print.h"
78 #include "toplev.h"
79 #include "debug.h"
80 #include "params.h"
81 #include "tree-inline.h"
82 #include "value-prof.h"
83 #include "target.h"
84 #include "tree-ssa-live.h"
85 #include "tree-outof-ssa.h"
86 #include "sbitmap.h"
87 #include "cfgloop.h"
88 #include "regs.h" /* For reg_renumber. */
89 #include "insn-attr.h" /* For INSN_SCHEDULING. */
90 #include "asan.h"
91 #include "tree-ssa-address.h"
92 #include "recog.h"
93 #include "output.h"
94 #include "builtins.h"
95 #include "tree-chkp.h"
96 #include "rtl-chkp.h"
98 /* Some systems use __main in a way incompatible with its use in gcc, in these
99 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
100 give the same symbol without quotes for an alternative entry point. You
101 must define both, or neither. */
102 #ifndef NAME__MAIN
103 #define NAME__MAIN "__main"
104 #endif
106 /* This variable holds information helping the rewriting of SSA trees
107 into RTL. */
108 struct ssaexpand SA;
110 /* This variable holds the currently expanded gimple statement for purposes
111 of comminucating the profile info to the builtin expanders. */
112 gimple currently_expanding_gimple_stmt;
114 static rtx expand_debug_expr (tree);
116 /* Return an expression tree corresponding to the RHS of GIMPLE
117 statement STMT. */
119 tree
120 gimple_assign_rhs_to_tree (gimple stmt)
122 tree t;
123 enum gimple_rhs_class grhs_class;
125 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
127 if (grhs_class == GIMPLE_TERNARY_RHS)
128 t = build3 (gimple_assign_rhs_code (stmt),
129 TREE_TYPE (gimple_assign_lhs (stmt)),
130 gimple_assign_rhs1 (stmt),
131 gimple_assign_rhs2 (stmt),
132 gimple_assign_rhs3 (stmt));
133 else if (grhs_class == GIMPLE_BINARY_RHS)
134 t = build2 (gimple_assign_rhs_code (stmt),
135 TREE_TYPE (gimple_assign_lhs (stmt)),
136 gimple_assign_rhs1 (stmt),
137 gimple_assign_rhs2 (stmt));
138 else if (grhs_class == GIMPLE_UNARY_RHS)
139 t = build1 (gimple_assign_rhs_code (stmt),
140 TREE_TYPE (gimple_assign_lhs (stmt)),
141 gimple_assign_rhs1 (stmt));
142 else if (grhs_class == GIMPLE_SINGLE_RHS)
144 t = gimple_assign_rhs1 (stmt);
145 /* Avoid modifying this tree in place below. */
146 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
147 && gimple_location (stmt) != EXPR_LOCATION (t))
148 || (gimple_block (stmt)
149 && currently_expanding_to_rtl
150 && EXPR_P (t)))
151 t = copy_node (t);
153 else
154 gcc_unreachable ();
156 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
157 SET_EXPR_LOCATION (t, gimple_location (stmt));
159 return t;
163 #ifndef STACK_ALIGNMENT_NEEDED
164 #define STACK_ALIGNMENT_NEEDED 1
165 #endif
167 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
169 /* Associate declaration T with storage space X. If T is no
170 SSA name this is exactly SET_DECL_RTL, otherwise make the
171 partition of T associated with X. */
172 static inline void
173 set_rtl (tree t, rtx x)
175 if (TREE_CODE (t) == SSA_NAME)
177 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
178 if (x && !MEM_P (x))
179 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
180 /* For the benefit of debug information at -O0 (where vartracking
181 doesn't run) record the place also in the base DECL if it's
182 a normal variable (not a parameter). */
183 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
185 tree var = SSA_NAME_VAR (t);
186 /* If we don't yet have something recorded, just record it now. */
187 if (!DECL_RTL_SET_P (var))
188 SET_DECL_RTL (var, x);
189 /* If we have it set already to "multiple places" don't
190 change this. */
191 else if (DECL_RTL (var) == pc_rtx)
193 /* If we have something recorded and it's not the same place
194 as we want to record now, we have multiple partitions for the
195 same base variable, with different places. We can't just
196 randomly chose one, hence we have to say that we don't know.
197 This only happens with optimization, and there var-tracking
198 will figure out the right thing. */
199 else if (DECL_RTL (var) != x)
200 SET_DECL_RTL (var, pc_rtx);
203 else
204 SET_DECL_RTL (t, x);
207 /* This structure holds data relevant to one variable that will be
208 placed in a stack slot. */
209 struct stack_var
211 /* The Variable. */
212 tree decl;
214 /* Initially, the size of the variable. Later, the size of the partition,
215 if this variable becomes it's partition's representative. */
216 HOST_WIDE_INT size;
218 /* The *byte* alignment required for this variable. Or as, with the
219 size, the alignment for this partition. */
220 unsigned int alignb;
222 /* The partition representative. */
223 size_t representative;
225 /* The next stack variable in the partition, or EOC. */
226 size_t next;
228 /* The numbers of conflicting stack variables. */
229 bitmap conflicts;
232 #define EOC ((size_t)-1)
234 /* We have an array of such objects while deciding allocation. */
235 static struct stack_var *stack_vars;
236 static size_t stack_vars_alloc;
237 static size_t stack_vars_num;
238 static hash_map<tree, size_t> *decl_to_stack_part;
240 /* Conflict bitmaps go on this obstack. This allows us to destroy
241 all of them in one big sweep. */
242 static bitmap_obstack stack_var_bitmap_obstack;
244 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
245 is non-decreasing. */
246 static size_t *stack_vars_sorted;
248 /* The phase of the stack frame. This is the known misalignment of
249 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
250 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
251 static int frame_phase;
253 /* Used during expand_used_vars to remember if we saw any decls for
254 which we'd like to enable stack smashing protection. */
255 static bool has_protected_decls;
257 /* Used during expand_used_vars. Remember if we say a character buffer
258 smaller than our cutoff threshold. Used for -Wstack-protector. */
259 static bool has_short_buffer;
261 /* Compute the byte alignment to use for DECL. Ignore alignment
262 we can't do with expected alignment of the stack boundary. */
264 static unsigned int
265 align_local_variable (tree decl)
267 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
268 DECL_ALIGN (decl) = align;
269 return align / BITS_PER_UNIT;
272 /* Align given offset BASE with ALIGN. Truncate up if ALIGN_UP is true,
273 down otherwise. Return truncated BASE value. */
275 static inline unsigned HOST_WIDE_INT
276 align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
278 return align_up ? (base + align - 1) & -align : base & -align;
281 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
282 Return the frame offset. */
284 static HOST_WIDE_INT
285 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
287 HOST_WIDE_INT offset, new_frame_offset;
289 if (FRAME_GROWS_DOWNWARD)
291 new_frame_offset
292 = align_base (frame_offset - frame_phase - size,
293 align, false) + frame_phase;
294 offset = new_frame_offset;
296 else
298 new_frame_offset
299 = align_base (frame_offset - frame_phase, align, true) + frame_phase;
300 offset = new_frame_offset;
301 new_frame_offset += size;
303 frame_offset = new_frame_offset;
305 if (frame_offset_overflow (frame_offset, cfun->decl))
306 frame_offset = offset = 0;
308 return offset;
311 /* Accumulate DECL into STACK_VARS. */
313 static void
314 add_stack_var (tree decl)
316 struct stack_var *v;
318 if (stack_vars_num >= stack_vars_alloc)
320 if (stack_vars_alloc)
321 stack_vars_alloc = stack_vars_alloc * 3 / 2;
322 else
323 stack_vars_alloc = 32;
324 stack_vars
325 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
327 if (!decl_to_stack_part)
328 decl_to_stack_part = new hash_map<tree, size_t>;
330 v = &stack_vars[stack_vars_num];
331 decl_to_stack_part->put (decl, stack_vars_num);
333 v->decl = decl;
334 v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
335 /* Ensure that all variables have size, so that &a != &b for any two
336 variables that are simultaneously live. */
337 if (v->size == 0)
338 v->size = 1;
339 v->alignb = align_local_variable (SSAVAR (decl));
340 /* An alignment of zero can mightily confuse us later. */
341 gcc_assert (v->alignb != 0);
343 /* All variables are initially in their own partition. */
344 v->representative = stack_vars_num;
345 v->next = EOC;
347 /* All variables initially conflict with no other. */
348 v->conflicts = NULL;
350 /* Ensure that this decl doesn't get put onto the list twice. */
351 set_rtl (decl, pc_rtx);
353 stack_vars_num++;
356 /* Make the decls associated with luid's X and Y conflict. */
358 static void
359 add_stack_var_conflict (size_t x, size_t y)
361 struct stack_var *a = &stack_vars[x];
362 struct stack_var *b = &stack_vars[y];
363 if (!a->conflicts)
364 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
365 if (!b->conflicts)
366 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
367 bitmap_set_bit (a->conflicts, y);
368 bitmap_set_bit (b->conflicts, x);
371 /* Check whether the decls associated with luid's X and Y conflict. */
373 static bool
374 stack_var_conflict_p (size_t x, size_t y)
376 struct stack_var *a = &stack_vars[x];
377 struct stack_var *b = &stack_vars[y];
378 if (x == y)
379 return false;
380 /* Partitions containing an SSA name result from gimple registers
381 with things like unsupported modes. They are top-level and
382 hence conflict with everything else. */
383 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
384 return true;
386 if (!a->conflicts || !b->conflicts)
387 return false;
388 return bitmap_bit_p (a->conflicts, y);
391 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
392 enter its partition number into bitmap DATA. */
394 static bool
395 visit_op (gimple, tree op, tree, void *data)
397 bitmap active = (bitmap)data;
398 op = get_base_address (op);
399 if (op
400 && DECL_P (op)
401 && DECL_RTL_IF_SET (op) == pc_rtx)
403 size_t *v = decl_to_stack_part->get (op);
404 if (v)
405 bitmap_set_bit (active, *v);
407 return false;
410 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
411 record conflicts between it and all currently active other partitions
412 from bitmap DATA. */
414 static bool
415 visit_conflict (gimple, tree op, tree, void *data)
417 bitmap active = (bitmap)data;
418 op = get_base_address (op);
419 if (op
420 && DECL_P (op)
421 && DECL_RTL_IF_SET (op) == pc_rtx)
423 size_t *v = decl_to_stack_part->get (op);
424 if (v && bitmap_set_bit (active, *v))
426 size_t num = *v;
427 bitmap_iterator bi;
428 unsigned i;
429 gcc_assert (num < stack_vars_num);
430 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
431 add_stack_var_conflict (num, i);
434 return false;
437 /* Helper routine for add_scope_conflicts, calculating the active partitions
438 at the end of BB, leaving the result in WORK. We're called to generate
439 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
440 liveness. */
442 static void
443 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
445 edge e;
446 edge_iterator ei;
447 gimple_stmt_iterator gsi;
448 walk_stmt_load_store_addr_fn visit;
450 bitmap_clear (work);
451 FOR_EACH_EDGE (e, ei, bb->preds)
452 bitmap_ior_into (work, (bitmap)e->src->aux);
454 visit = visit_op;
456 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
458 gimple stmt = gsi_stmt (gsi);
459 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
461 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
463 gimple stmt = gsi_stmt (gsi);
465 if (gimple_clobber_p (stmt))
467 tree lhs = gimple_assign_lhs (stmt);
468 size_t *v;
469 /* Nested function lowering might introduce LHSs
470 that are COMPONENT_REFs. */
471 if (TREE_CODE (lhs) != VAR_DECL)
472 continue;
473 if (DECL_RTL_IF_SET (lhs) == pc_rtx
474 && (v = decl_to_stack_part->get (lhs)))
475 bitmap_clear_bit (work, *v);
477 else if (!is_gimple_debug (stmt))
479 if (for_conflict
480 && visit == visit_op)
482 /* If this is the first real instruction in this BB we need
483 to add conflicts for everything live at this point now.
484 Unlike classical liveness for named objects we can't
485 rely on seeing a def/use of the names we're interested in.
486 There might merely be indirect loads/stores. We'd not add any
487 conflicts for such partitions. */
488 bitmap_iterator bi;
489 unsigned i;
490 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
492 struct stack_var *a = &stack_vars[i];
493 if (!a->conflicts)
494 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
495 bitmap_ior_into (a->conflicts, work);
497 visit = visit_conflict;
499 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
504 /* Generate stack partition conflicts between all partitions that are
505 simultaneously live. */
507 static void
508 add_scope_conflicts (void)
510 basic_block bb;
511 bool changed;
512 bitmap work = BITMAP_ALLOC (NULL);
513 int *rpo;
514 int n_bbs;
516 /* We approximate the live range of a stack variable by taking the first
517 mention of its name as starting point(s), and by the end-of-scope
518 death clobber added by gimplify as ending point(s) of the range.
519 This overapproximates in the case we for instance moved an address-taken
520 operation upward, without also moving a dereference to it upwards.
521 But it's conservatively correct as a variable never can hold values
522 before its name is mentioned at least once.
524 We then do a mostly classical bitmap liveness algorithm. */
526 FOR_ALL_BB_FN (bb, cfun)
527 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
529 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
530 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
532 changed = true;
533 while (changed)
535 int i;
536 changed = false;
537 for (i = 0; i < n_bbs; i++)
539 bitmap active;
540 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
541 active = (bitmap)bb->aux;
542 add_scope_conflicts_1 (bb, work, false);
543 if (bitmap_ior_into (active, work))
544 changed = true;
548 FOR_EACH_BB_FN (bb, cfun)
549 add_scope_conflicts_1 (bb, work, true);
551 free (rpo);
552 BITMAP_FREE (work);
553 FOR_ALL_BB_FN (bb, cfun)
554 BITMAP_FREE (bb->aux);
557 /* A subroutine of partition_stack_vars. A comparison function for qsort,
558 sorting an array of indices by the properties of the object. */
560 static int
561 stack_var_cmp (const void *a, const void *b)
563 size_t ia = *(const size_t *)a;
564 size_t ib = *(const size_t *)b;
565 unsigned int aligna = stack_vars[ia].alignb;
566 unsigned int alignb = stack_vars[ib].alignb;
567 HOST_WIDE_INT sizea = stack_vars[ia].size;
568 HOST_WIDE_INT sizeb = stack_vars[ib].size;
569 tree decla = stack_vars[ia].decl;
570 tree declb = stack_vars[ib].decl;
571 bool largea, largeb;
572 unsigned int uida, uidb;
574 /* Primary compare on "large" alignment. Large comes first. */
575 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
576 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
577 if (largea != largeb)
578 return (int)largeb - (int)largea;
580 /* Secondary compare on size, decreasing */
581 if (sizea > sizeb)
582 return -1;
583 if (sizea < sizeb)
584 return 1;
586 /* Tertiary compare on true alignment, decreasing. */
587 if (aligna < alignb)
588 return -1;
589 if (aligna > alignb)
590 return 1;
592 /* Final compare on ID for sort stability, increasing.
593 Two SSA names are compared by their version, SSA names come before
594 non-SSA names, and two normal decls are compared by their DECL_UID. */
595 if (TREE_CODE (decla) == SSA_NAME)
597 if (TREE_CODE (declb) == SSA_NAME)
598 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
599 else
600 return -1;
602 else if (TREE_CODE (declb) == SSA_NAME)
603 return 1;
604 else
605 uida = DECL_UID (decla), uidb = DECL_UID (declb);
606 if (uida < uidb)
607 return 1;
608 if (uida > uidb)
609 return -1;
610 return 0;
613 struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
614 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
616 /* If the points-to solution *PI points to variables that are in a partition
617 together with other variables add all partition members to the pointed-to
618 variables bitmap. */
620 static void
621 add_partitioned_vars_to_ptset (struct pt_solution *pt,
622 part_hashmap *decls_to_partitions,
623 hash_set<bitmap> *visited, bitmap temp)
625 bitmap_iterator bi;
626 unsigned i;
627 bitmap *part;
629 if (pt->anything
630 || pt->vars == NULL
631 /* The pointed-to vars bitmap is shared, it is enough to
632 visit it once. */
633 || visited->add (pt->vars))
634 return;
636 bitmap_clear (temp);
638 /* By using a temporary bitmap to store all members of the partitions
639 we have to add we make sure to visit each of the partitions only
640 once. */
641 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
642 if ((!temp
643 || !bitmap_bit_p (temp, i))
644 && (part = decls_to_partitions->get (i)))
645 bitmap_ior_into (temp, *part);
646 if (!bitmap_empty_p (temp))
647 bitmap_ior_into (pt->vars, temp);
650 /* Update points-to sets based on partition info, so we can use them on RTL.
651 The bitmaps representing stack partitions will be saved until expand,
652 where partitioned decls used as bases in memory expressions will be
653 rewritten. */
655 static void
656 update_alias_info_with_stack_vars (void)
658 part_hashmap *decls_to_partitions = NULL;
659 size_t i, j;
660 tree var = NULL_TREE;
662 for (i = 0; i < stack_vars_num; i++)
664 bitmap part = NULL;
665 tree name;
666 struct ptr_info_def *pi;
668 /* Not interested in partitions with single variable. */
669 if (stack_vars[i].representative != i
670 || stack_vars[i].next == EOC)
671 continue;
673 if (!decls_to_partitions)
675 decls_to_partitions = new part_hashmap;
676 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
679 /* Create an SSA_NAME that points to the partition for use
680 as base during alias-oracle queries on RTL for bases that
681 have been partitioned. */
682 if (var == NULL_TREE)
683 var = create_tmp_var (ptr_type_node);
684 name = make_ssa_name (var);
686 /* Create bitmaps representing partitions. They will be used for
687 points-to sets later, so use GGC alloc. */
688 part = BITMAP_GGC_ALLOC ();
689 for (j = i; j != EOC; j = stack_vars[j].next)
691 tree decl = stack_vars[j].decl;
692 unsigned int uid = DECL_PT_UID (decl);
693 bitmap_set_bit (part, uid);
694 decls_to_partitions->put (uid, part);
695 cfun->gimple_df->decls_to_pointers->put (decl, name);
696 if (TREE_ADDRESSABLE (decl))
697 TREE_ADDRESSABLE (name) = 1;
700 /* Make the SSA name point to all partition members. */
701 pi = get_ptr_info (name);
702 pt_solution_set (&pi->pt, part, false);
705 /* Make all points-to sets that contain one member of a partition
706 contain all members of the partition. */
707 if (decls_to_partitions)
709 unsigned i;
710 hash_set<bitmap> visited;
711 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
713 for (i = 1; i < num_ssa_names; i++)
715 tree name = ssa_name (i);
716 struct ptr_info_def *pi;
718 if (name
719 && POINTER_TYPE_P (TREE_TYPE (name))
720 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
721 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
722 &visited, temp);
725 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
726 decls_to_partitions, &visited, temp);
728 delete decls_to_partitions;
729 BITMAP_FREE (temp);
733 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
734 partitioning algorithm. Partitions A and B are known to be non-conflicting.
735 Merge them into a single partition A. */
737 static void
738 union_stack_vars (size_t a, size_t b)
740 struct stack_var *vb = &stack_vars[b];
741 bitmap_iterator bi;
742 unsigned u;
744 gcc_assert (stack_vars[b].next == EOC);
745 /* Add B to A's partition. */
746 stack_vars[b].next = stack_vars[a].next;
747 stack_vars[b].representative = a;
748 stack_vars[a].next = b;
750 /* Update the required alignment of partition A to account for B. */
751 if (stack_vars[a].alignb < stack_vars[b].alignb)
752 stack_vars[a].alignb = stack_vars[b].alignb;
754 /* Update the interference graph and merge the conflicts. */
755 if (vb->conflicts)
757 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
758 add_stack_var_conflict (a, stack_vars[u].representative);
759 BITMAP_FREE (vb->conflicts);
763 /* A subroutine of expand_used_vars. Binpack the variables into
764 partitions constrained by the interference graph. The overall
765 algorithm used is as follows:
767 Sort the objects by size in descending order.
768 For each object A {
769 S = size(A)
770 O = 0
771 loop {
772 Look for the largest non-conflicting object B with size <= S.
773 UNION (A, B)
778 static void
779 partition_stack_vars (void)
781 size_t si, sj, n = stack_vars_num;
783 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
784 for (si = 0; si < n; ++si)
785 stack_vars_sorted[si] = si;
787 if (n == 1)
788 return;
790 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
792 for (si = 0; si < n; ++si)
794 size_t i = stack_vars_sorted[si];
795 unsigned int ialign = stack_vars[i].alignb;
796 HOST_WIDE_INT isize = stack_vars[i].size;
798 /* Ignore objects that aren't partition representatives. If we
799 see a var that is not a partition representative, it must
800 have been merged earlier. */
801 if (stack_vars[i].representative != i)
802 continue;
804 for (sj = si + 1; sj < n; ++sj)
806 size_t j = stack_vars_sorted[sj];
807 unsigned int jalign = stack_vars[j].alignb;
808 HOST_WIDE_INT jsize = stack_vars[j].size;
810 /* Ignore objects that aren't partition representatives. */
811 if (stack_vars[j].representative != j)
812 continue;
814 /* Do not mix objects of "small" (supported) alignment
815 and "large" (unsupported) alignment. */
816 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
817 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
818 break;
820 /* For Address Sanitizer do not mix objects with different
821 sizes, as the shorter vars wouldn't be adequately protected.
822 Don't do that for "large" (unsupported) alignment objects,
823 those aren't protected anyway. */
824 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
825 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
826 break;
828 /* Ignore conflicting objects. */
829 if (stack_var_conflict_p (i, j))
830 continue;
832 /* UNION the objects, placing J at OFFSET. */
833 union_stack_vars (i, j);
837 update_alias_info_with_stack_vars ();
840 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
842 static void
843 dump_stack_var_partition (void)
845 size_t si, i, j, n = stack_vars_num;
847 for (si = 0; si < n; ++si)
849 i = stack_vars_sorted[si];
851 /* Skip variables that aren't partition representatives, for now. */
852 if (stack_vars[i].representative != i)
853 continue;
855 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
856 " align %u\n", (unsigned long) i, stack_vars[i].size,
857 stack_vars[i].alignb);
859 for (j = i; j != EOC; j = stack_vars[j].next)
861 fputc ('\t', dump_file);
862 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
864 fputc ('\n', dump_file);
868 /* Assign rtl to DECL at BASE + OFFSET. */
870 static void
871 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
872 HOST_WIDE_INT offset)
874 unsigned align;
875 rtx x;
877 /* If this fails, we've overflowed the stack frame. Error nicely? */
878 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
880 x = plus_constant (Pmode, base, offset);
881 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
883 if (TREE_CODE (decl) != SSA_NAME)
885 /* Set alignment we actually gave this decl if it isn't an SSA name.
886 If it is we generate stack slots only accidentally so it isn't as
887 important, we'll simply use the alignment that is already set. */
888 if (base == virtual_stack_vars_rtx)
889 offset -= frame_phase;
890 align = offset & -offset;
891 align *= BITS_PER_UNIT;
892 if (align == 0 || align > base_align)
893 align = base_align;
895 /* One would think that we could assert that we're not decreasing
896 alignment here, but (at least) the i386 port does exactly this
897 via the MINIMUM_ALIGNMENT hook. */
899 DECL_ALIGN (decl) = align;
900 DECL_USER_ALIGN (decl) = 0;
903 set_mem_attributes (x, SSAVAR (decl), true);
904 set_rtl (decl, x);
907 struct stack_vars_data
909 /* Vector of offset pairs, always end of some padding followed
910 by start of the padding that needs Address Sanitizer protection.
911 The vector is in reversed, highest offset pairs come first. */
912 vec<HOST_WIDE_INT> asan_vec;
914 /* Vector of partition representative decls in between the paddings. */
915 vec<tree> asan_decl_vec;
917 /* Base pseudo register for Address Sanitizer protected automatic vars. */
918 rtx asan_base;
920 /* Alignment needed for the Address Sanitizer protected automatic vars. */
921 unsigned int asan_alignb;
924 /* A subroutine of expand_used_vars. Give each partition representative
925 a unique location within the stack frame. Update each partition member
926 with that location. */
928 static void
929 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
931 size_t si, i, j, n = stack_vars_num;
932 HOST_WIDE_INT large_size = 0, large_alloc = 0;
933 rtx large_base = NULL;
934 unsigned large_align = 0;
935 tree decl;
937 /* Determine if there are any variables requiring "large" alignment.
938 Since these are dynamically allocated, we only process these if
939 no predicate involved. */
940 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
941 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
943 /* Find the total size of these variables. */
944 for (si = 0; si < n; ++si)
946 unsigned alignb;
948 i = stack_vars_sorted[si];
949 alignb = stack_vars[i].alignb;
951 /* All "large" alignment decls come before all "small" alignment
952 decls, but "large" alignment decls are not sorted based on
953 their alignment. Increase large_align to track the largest
954 required alignment. */
955 if ((alignb * BITS_PER_UNIT) > large_align)
956 large_align = alignb * BITS_PER_UNIT;
958 /* Stop when we get to the first decl with "small" alignment. */
959 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
960 break;
962 /* Skip variables that aren't partition representatives. */
963 if (stack_vars[i].representative != i)
964 continue;
966 /* Skip variables that have already had rtl assigned. See also
967 add_stack_var where we perpetrate this pc_rtx hack. */
968 decl = stack_vars[i].decl;
969 if ((TREE_CODE (decl) == SSA_NAME
970 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
971 : DECL_RTL (decl)) != pc_rtx)
972 continue;
974 large_size += alignb - 1;
975 large_size &= -(HOST_WIDE_INT)alignb;
976 large_size += stack_vars[i].size;
979 /* If there were any, allocate space. */
980 if (large_size > 0)
981 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
982 large_align, true);
985 for (si = 0; si < n; ++si)
987 rtx base;
988 unsigned base_align, alignb;
989 HOST_WIDE_INT offset;
991 i = stack_vars_sorted[si];
993 /* Skip variables that aren't partition representatives, for now. */
994 if (stack_vars[i].representative != i)
995 continue;
997 /* Skip variables that have already had rtl assigned. See also
998 add_stack_var where we perpetrate this pc_rtx hack. */
999 decl = stack_vars[i].decl;
1000 if ((TREE_CODE (decl) == SSA_NAME
1001 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
1002 : DECL_RTL (decl)) != pc_rtx)
1003 continue;
1005 /* Check the predicate to see whether this variable should be
1006 allocated in this pass. */
1007 if (pred && !pred (i))
1008 continue;
1010 alignb = stack_vars[i].alignb;
1011 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1013 base = virtual_stack_vars_rtx;
1014 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
1016 HOST_WIDE_INT prev_offset
1017 = align_base (frame_offset,
1018 MAX (alignb, ASAN_RED_ZONE_SIZE),
1019 FRAME_GROWS_DOWNWARD);
1020 tree repr_decl = NULL_TREE;
1021 offset
1022 = alloc_stack_frame_space (stack_vars[i].size
1023 + ASAN_RED_ZONE_SIZE,
1024 MAX (alignb, ASAN_RED_ZONE_SIZE));
1026 data->asan_vec.safe_push (prev_offset);
1027 data->asan_vec.safe_push (offset + stack_vars[i].size);
1028 /* Find best representative of the partition.
1029 Prefer those with DECL_NAME, even better
1030 satisfying asan_protect_stack_decl predicate. */
1031 for (j = i; j != EOC; j = stack_vars[j].next)
1032 if (asan_protect_stack_decl (stack_vars[j].decl)
1033 && DECL_NAME (stack_vars[j].decl))
1035 repr_decl = stack_vars[j].decl;
1036 break;
1038 else if (repr_decl == NULL_TREE
1039 && DECL_P (stack_vars[j].decl)
1040 && DECL_NAME (stack_vars[j].decl))
1041 repr_decl = stack_vars[j].decl;
1042 if (repr_decl == NULL_TREE)
1043 repr_decl = stack_vars[i].decl;
1044 data->asan_decl_vec.safe_push (repr_decl);
1045 data->asan_alignb = MAX (data->asan_alignb, alignb);
1046 if (data->asan_base == NULL)
1047 data->asan_base = gen_reg_rtx (Pmode);
1048 base = data->asan_base;
1050 if (!STRICT_ALIGNMENT)
1051 base_align = crtl->max_used_stack_slot_alignment;
1052 else
1053 base_align = MAX (crtl->max_used_stack_slot_alignment,
1054 GET_MODE_ALIGNMENT (SImode)
1055 << ASAN_SHADOW_SHIFT);
1057 else
1059 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1060 base_align = crtl->max_used_stack_slot_alignment;
1063 else
1065 /* Large alignment is only processed in the last pass. */
1066 if (pred)
1067 continue;
1068 gcc_assert (large_base != NULL);
1070 large_alloc += alignb - 1;
1071 large_alloc &= -(HOST_WIDE_INT)alignb;
1072 offset = large_alloc;
1073 large_alloc += stack_vars[i].size;
1075 base = large_base;
1076 base_align = large_align;
1079 /* Create rtl for each variable based on their location within the
1080 partition. */
1081 for (j = i; j != EOC; j = stack_vars[j].next)
1083 expand_one_stack_var_at (stack_vars[j].decl,
1084 base, base_align,
1085 offset);
1089 gcc_assert (large_alloc == large_size);
1092 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1093 static HOST_WIDE_INT
1094 account_stack_vars (void)
1096 size_t si, j, i, n = stack_vars_num;
1097 HOST_WIDE_INT size = 0;
1099 for (si = 0; si < n; ++si)
1101 i = stack_vars_sorted[si];
1103 /* Skip variables that aren't partition representatives, for now. */
1104 if (stack_vars[i].representative != i)
1105 continue;
1107 size += stack_vars[i].size;
1108 for (j = i; j != EOC; j = stack_vars[j].next)
1109 set_rtl (stack_vars[j].decl, NULL);
1111 return size;
1114 /* A subroutine of expand_one_var. Called to immediately assign rtl
1115 to a variable to be allocated in the stack frame. */
1117 static void
1118 expand_one_stack_var (tree var)
1120 HOST_WIDE_INT size, offset;
1121 unsigned byte_align;
1123 size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
1124 byte_align = align_local_variable (SSAVAR (var));
1126 /* We handle highly aligned variables in expand_stack_vars. */
1127 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1129 offset = alloc_stack_frame_space (size, byte_align);
1131 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1132 crtl->max_used_stack_slot_alignment, offset);
1135 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1136 that will reside in a hard register. */
1138 static void
1139 expand_one_hard_reg_var (tree var)
1141 rest_of_decl_compilation (var, 0, 0);
1144 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1145 that will reside in a pseudo register. */
1147 static void
1148 expand_one_register_var (tree var)
1150 tree decl = SSAVAR (var);
1151 tree type = TREE_TYPE (decl);
1152 machine_mode reg_mode = promote_decl_mode (decl, NULL);
1153 rtx x = gen_reg_rtx (reg_mode);
1155 set_rtl (var, x);
1157 /* Note if the object is a user variable. */
1158 if (!DECL_ARTIFICIAL (decl))
1159 mark_user_reg (x);
1161 if (POINTER_TYPE_P (type))
1162 mark_reg_pointer (x, get_pointer_alignment (var));
1165 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1166 has some associated error, e.g. its type is error-mark. We just need
1167 to pick something that won't crash the rest of the compiler. */
1169 static void
1170 expand_one_error_var (tree var)
1172 machine_mode mode = DECL_MODE (var);
1173 rtx x;
1175 if (mode == BLKmode)
1176 x = gen_rtx_MEM (BLKmode, const0_rtx);
1177 else if (mode == VOIDmode)
1178 x = const0_rtx;
1179 else
1180 x = gen_reg_rtx (mode);
1182 SET_DECL_RTL (var, x);
1185 /* A subroutine of expand_one_var. VAR is a variable that will be
1186 allocated to the local stack frame. Return true if we wish to
1187 add VAR to STACK_VARS so that it will be coalesced with other
1188 variables. Return false to allocate VAR immediately.
1190 This function is used to reduce the number of variables considered
1191 for coalescing, which reduces the size of the quadratic problem. */
1193 static bool
1194 defer_stack_allocation (tree var, bool toplevel)
1196 /* Whether the variable is small enough for immediate allocation not to be
1197 a problem with regard to the frame size. */
1198 bool smallish
1199 = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
1200 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1202 /* If stack protection is enabled, *all* stack variables must be deferred,
1203 so that we can re-order the strings to the top of the frame.
1204 Similarly for Address Sanitizer. */
1205 if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
1206 return true;
1208 /* We handle "large" alignment via dynamic allocation. We want to handle
1209 this extra complication in only one place, so defer them. */
1210 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1211 return true;
1213 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1214 might be detached from their block and appear at toplevel when we reach
1215 here. We want to coalesce them with variables from other blocks when
1216 the immediate contribution to the frame size would be noticeable. */
1217 if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
1218 return true;
1220 /* Variables declared in the outermost scope automatically conflict
1221 with every other variable. The only reason to want to defer them
1222 at all is that, after sorting, we can more efficiently pack
1223 small variables in the stack frame. Continue to defer at -O2. */
1224 if (toplevel && optimize < 2)
1225 return false;
1227 /* Without optimization, *most* variables are allocated from the
1228 stack, which makes the quadratic problem large exactly when we
1229 want compilation to proceed as quickly as possible. On the
1230 other hand, we don't want the function's stack frame size to
1231 get completely out of hand. So we avoid adding scalars and
1232 "small" aggregates to the list at all. */
1233 if (optimize == 0 && smallish)
1234 return false;
1236 return true;
1239 /* A subroutine of expand_used_vars. Expand one variable according to
1240 its flavor. Variables to be placed on the stack are not actually
1241 expanded yet, merely recorded.
1242 When REALLY_EXPAND is false, only add stack values to be allocated.
1243 Return stack usage this variable is supposed to take.
1246 static HOST_WIDE_INT
1247 expand_one_var (tree var, bool toplevel, bool really_expand)
1249 unsigned int align = BITS_PER_UNIT;
1250 tree origvar = var;
1252 var = SSAVAR (var);
1254 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1256 /* Because we don't know if VAR will be in register or on stack,
1257 we conservatively assume it will be on stack even if VAR is
1258 eventually put into register after RA pass. For non-automatic
1259 variables, which won't be on stack, we collect alignment of
1260 type and ignore user specified alignment. Similarly for
1261 SSA_NAMEs for which use_register_for_decl returns true. */
1262 if (TREE_STATIC (var)
1263 || DECL_EXTERNAL (var)
1264 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1265 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1266 TYPE_MODE (TREE_TYPE (var)),
1267 TYPE_ALIGN (TREE_TYPE (var)));
1268 else if (DECL_HAS_VALUE_EXPR_P (var)
1269 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1270 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1271 or variables which were assigned a stack slot already by
1272 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1273 changed from the offset chosen to it. */
1274 align = crtl->stack_alignment_estimated;
1275 else
1276 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1278 /* If the variable alignment is very large we'll dynamicaly allocate
1279 it, which means that in-frame portion is just a pointer. */
1280 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1281 align = POINTER_SIZE;
1284 if (SUPPORTS_STACK_ALIGNMENT
1285 && crtl->stack_alignment_estimated < align)
1287 /* stack_alignment_estimated shouldn't change after stack
1288 realign decision made */
1289 gcc_assert (!crtl->stack_realign_processed);
1290 crtl->stack_alignment_estimated = align;
1293 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1294 So here we only make sure stack_alignment_needed >= align. */
1295 if (crtl->stack_alignment_needed < align)
1296 crtl->stack_alignment_needed = align;
1297 if (crtl->max_used_stack_slot_alignment < align)
1298 crtl->max_used_stack_slot_alignment = align;
1300 if (TREE_CODE (origvar) == SSA_NAME)
1302 gcc_assert (TREE_CODE (var) != VAR_DECL
1303 || (!DECL_EXTERNAL (var)
1304 && !DECL_HAS_VALUE_EXPR_P (var)
1305 && !TREE_STATIC (var)
1306 && TREE_TYPE (var) != error_mark_node
1307 && !DECL_HARD_REGISTER (var)
1308 && really_expand));
1310 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1312 else if (DECL_EXTERNAL (var))
1314 else if (DECL_HAS_VALUE_EXPR_P (var))
1316 else if (TREE_STATIC (var))
1318 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1320 else if (TREE_TYPE (var) == error_mark_node)
1322 if (really_expand)
1323 expand_one_error_var (var);
1325 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1327 if (really_expand)
1329 expand_one_hard_reg_var (var);
1330 if (!DECL_HARD_REGISTER (var))
1331 /* Invalid register specification. */
1332 expand_one_error_var (var);
1335 else if (use_register_for_decl (var))
1337 if (really_expand)
1338 expand_one_register_var (origvar);
1340 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1342 /* Reject variables which cover more than half of the address-space. */
1343 if (really_expand)
1345 error ("size of variable %q+D is too large", var);
1346 expand_one_error_var (var);
1349 else if (defer_stack_allocation (var, toplevel))
1350 add_stack_var (origvar);
1351 else
1353 if (really_expand)
1354 expand_one_stack_var (origvar);
1355 return tree_to_uhwi (DECL_SIZE_UNIT (var));
1357 return 0;
1360 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1361 expanding variables. Those variables that can be put into registers
1362 are allocated pseudos; those that can't are put on the stack.
1364 TOPLEVEL is true if this is the outermost BLOCK. */
1366 static void
1367 expand_used_vars_for_block (tree block, bool toplevel)
1369 tree t;
1371 /* Expand all variables at this level. */
1372 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1373 if (TREE_USED (t)
1374 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1375 || !DECL_NONSHAREABLE (t)))
1376 expand_one_var (t, toplevel, true);
1378 /* Expand all variables at containing levels. */
1379 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1380 expand_used_vars_for_block (t, false);
1383 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1384 and clear TREE_USED on all local variables. */
1386 static void
1387 clear_tree_used (tree block)
1389 tree t;
1391 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1392 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1393 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1394 || !DECL_NONSHAREABLE (t))
1395 TREE_USED (t) = 0;
1397 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1398 clear_tree_used (t);
1401 enum {
1402 SPCT_FLAG_DEFAULT = 1,
1403 SPCT_FLAG_ALL = 2,
1404 SPCT_FLAG_STRONG = 3,
1405 SPCT_FLAG_EXPLICIT = 4
1408 /* Examine TYPE and determine a bit mask of the following features. */
1410 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1411 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1412 #define SPCT_HAS_ARRAY 4
1413 #define SPCT_HAS_AGGREGATE 8
1415 static unsigned int
1416 stack_protect_classify_type (tree type)
1418 unsigned int ret = 0;
1419 tree t;
1421 switch (TREE_CODE (type))
1423 case ARRAY_TYPE:
1424 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1425 if (t == char_type_node
1426 || t == signed_char_type_node
1427 || t == unsigned_char_type_node)
1429 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1430 unsigned HOST_WIDE_INT len;
1432 if (!TYPE_SIZE_UNIT (type)
1433 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1434 len = max;
1435 else
1436 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1438 if (len < max)
1439 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1440 else
1441 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1443 else
1444 ret = SPCT_HAS_ARRAY;
1445 break;
1447 case UNION_TYPE:
1448 case QUAL_UNION_TYPE:
1449 case RECORD_TYPE:
1450 ret = SPCT_HAS_AGGREGATE;
1451 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1452 if (TREE_CODE (t) == FIELD_DECL)
1453 ret |= stack_protect_classify_type (TREE_TYPE (t));
1454 break;
1456 default:
1457 break;
1460 return ret;
1463 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1464 part of the local stack frame. Remember if we ever return nonzero for
1465 any variable in this function. The return value is the phase number in
1466 which the variable should be allocated. */
1468 static int
1469 stack_protect_decl_phase (tree decl)
1471 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1472 int ret = 0;
1474 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1475 has_short_buffer = true;
1477 if (flag_stack_protect == SPCT_FLAG_ALL
1478 || flag_stack_protect == SPCT_FLAG_STRONG
1479 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1480 && lookup_attribute ("stack_protect",
1481 DECL_ATTRIBUTES (current_function_decl))))
1483 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1484 && !(bits & SPCT_HAS_AGGREGATE))
1485 ret = 1;
1486 else if (bits & SPCT_HAS_ARRAY)
1487 ret = 2;
1489 else
1490 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1492 if (ret)
1493 has_protected_decls = true;
1495 return ret;
1498 /* Two helper routines that check for phase 1 and phase 2. These are used
1499 as callbacks for expand_stack_vars. */
1501 static bool
1502 stack_protect_decl_phase_1 (size_t i)
1504 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1507 static bool
1508 stack_protect_decl_phase_2 (size_t i)
1510 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1513 /* And helper function that checks for asan phase (with stack protector
1514 it is phase 3). This is used as callback for expand_stack_vars.
1515 Returns true if any of the vars in the partition need to be protected. */
1517 static bool
1518 asan_decl_phase_3 (size_t i)
1520 while (i != EOC)
1522 if (asan_protect_stack_decl (stack_vars[i].decl))
1523 return true;
1524 i = stack_vars[i].next;
1526 return false;
1529 /* Ensure that variables in different stack protection phases conflict
1530 so that they are not merged and share the same stack slot. */
1532 static void
1533 add_stack_protection_conflicts (void)
1535 size_t i, j, n = stack_vars_num;
1536 unsigned char *phase;
1538 phase = XNEWVEC (unsigned char, n);
1539 for (i = 0; i < n; ++i)
1540 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1542 for (i = 0; i < n; ++i)
1544 unsigned char ph_i = phase[i];
1545 for (j = i + 1; j < n; ++j)
1546 if (ph_i != phase[j])
1547 add_stack_var_conflict (i, j);
1550 XDELETEVEC (phase);
1553 /* Create a decl for the guard at the top of the stack frame. */
1555 static void
1556 create_stack_guard (void)
1558 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1559 VAR_DECL, NULL, ptr_type_node);
1560 TREE_THIS_VOLATILE (guard) = 1;
1561 TREE_USED (guard) = 1;
1562 expand_one_stack_var (guard);
1563 crtl->stack_protect_guard = guard;
1566 /* Prepare for expanding variables. */
1567 static void
1568 init_vars_expansion (void)
1570 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1571 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1573 /* A map from decl to stack partition. */
1574 decl_to_stack_part = new hash_map<tree, size_t>;
1576 /* Initialize local stack smashing state. */
1577 has_protected_decls = false;
1578 has_short_buffer = false;
1581 /* Free up stack variable graph data. */
1582 static void
1583 fini_vars_expansion (void)
1585 bitmap_obstack_release (&stack_var_bitmap_obstack);
1586 if (stack_vars)
1587 XDELETEVEC (stack_vars);
1588 if (stack_vars_sorted)
1589 XDELETEVEC (stack_vars_sorted);
1590 stack_vars = NULL;
1591 stack_vars_sorted = NULL;
1592 stack_vars_alloc = stack_vars_num = 0;
1593 delete decl_to_stack_part;
1594 decl_to_stack_part = NULL;
1597 /* Make a fair guess for the size of the stack frame of the function
1598 in NODE. This doesn't have to be exact, the result is only used in
1599 the inline heuristics. So we don't want to run the full stack var
1600 packing algorithm (which is quadratic in the number of stack vars).
1601 Instead, we calculate the total size of all stack vars. This turns
1602 out to be a pretty fair estimate -- packing of stack vars doesn't
1603 happen very often. */
1605 HOST_WIDE_INT
1606 estimated_stack_frame_size (struct cgraph_node *node)
1608 HOST_WIDE_INT size = 0;
1609 size_t i;
1610 tree var;
1611 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1613 push_cfun (fn);
1615 init_vars_expansion ();
1617 FOR_EACH_LOCAL_DECL (fn, i, var)
1618 if (auto_var_in_fn_p (var, fn->decl))
1619 size += expand_one_var (var, true, false);
1621 if (stack_vars_num > 0)
1623 /* Fake sorting the stack vars for account_stack_vars (). */
1624 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1625 for (i = 0; i < stack_vars_num; ++i)
1626 stack_vars_sorted[i] = i;
1627 size += account_stack_vars ();
1630 fini_vars_expansion ();
1631 pop_cfun ();
1632 return size;
1635 /* Helper routine to check if a record or union contains an array field. */
1637 static int
1638 record_or_union_type_has_array_p (const_tree tree_type)
1640 tree fields = TYPE_FIELDS (tree_type);
1641 tree f;
1643 for (f = fields; f; f = DECL_CHAIN (f))
1644 if (TREE_CODE (f) == FIELD_DECL)
1646 tree field_type = TREE_TYPE (f);
1647 if (RECORD_OR_UNION_TYPE_P (field_type)
1648 && record_or_union_type_has_array_p (field_type))
1649 return 1;
1650 if (TREE_CODE (field_type) == ARRAY_TYPE)
1651 return 1;
1653 return 0;
1656 /* Check if the current function has local referenced variables that
1657 have their addresses taken, contain an array, or are arrays. */
1659 static bool
1660 stack_protect_decl_p ()
1662 unsigned i;
1663 tree var;
1665 FOR_EACH_LOCAL_DECL (cfun, i, var)
1666 if (!is_global_var (var))
1668 tree var_type = TREE_TYPE (var);
1669 if (TREE_CODE (var) == VAR_DECL
1670 && (TREE_CODE (var_type) == ARRAY_TYPE
1671 || TREE_ADDRESSABLE (var)
1672 || (RECORD_OR_UNION_TYPE_P (var_type)
1673 && record_or_union_type_has_array_p (var_type))))
1674 return true;
1676 return false;
1679 /* Check if the current function has calls that use a return slot. */
1681 static bool
1682 stack_protect_return_slot_p ()
1684 basic_block bb;
1686 FOR_ALL_BB_FN (bb, cfun)
1687 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
1688 !gsi_end_p (gsi); gsi_next (&gsi))
1690 gimple stmt = gsi_stmt (gsi);
1691 /* This assumes that calls to internal-only functions never
1692 use a return slot. */
1693 if (is_gimple_call (stmt)
1694 && !gimple_call_internal_p (stmt)
1695 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
1696 gimple_call_fndecl (stmt)))
1697 return true;
1699 return false;
1702 /* Expand all variables used in the function. */
1704 static rtx_insn *
1705 expand_used_vars (void)
1707 tree var, outer_block = DECL_INITIAL (current_function_decl);
1708 vec<tree> maybe_local_decls = vNULL;
1709 rtx_insn *var_end_seq = NULL;
1710 unsigned i;
1711 unsigned len;
1712 bool gen_stack_protect_signal = false;
1714 /* Compute the phase of the stack frame for this function. */
1716 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1717 int off = STARTING_FRAME_OFFSET % align;
1718 frame_phase = off ? align - off : 0;
1721 /* Set TREE_USED on all variables in the local_decls. */
1722 FOR_EACH_LOCAL_DECL (cfun, i, var)
1723 TREE_USED (var) = 1;
1724 /* Clear TREE_USED on all variables associated with a block scope. */
1725 clear_tree_used (DECL_INITIAL (current_function_decl));
1727 init_vars_expansion ();
1729 if (targetm.use_pseudo_pic_reg ())
1730 pic_offset_table_rtx = gen_reg_rtx (Pmode);
1732 hash_map<tree, tree> ssa_name_decls;
1733 for (i = 0; i < SA.map->num_partitions; i++)
1735 tree var = partition_to_var (SA.map, i);
1737 gcc_assert (!virtual_operand_p (var));
1739 /* Assign decls to each SSA name partition, share decls for partitions
1740 we could have coalesced (those with the same type). */
1741 if (SSA_NAME_VAR (var) == NULL_TREE)
1743 tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
1744 if (!*slot)
1745 *slot = create_tmp_reg (TREE_TYPE (var));
1746 replace_ssa_name_symbol (var, *slot);
1749 /* Always allocate space for partitions based on VAR_DECLs. But for
1750 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1751 debug info, there is no need to do so if optimization is disabled
1752 because all the SSA_NAMEs based on these DECLs have been coalesced
1753 into a single partition, which is thus assigned the canonical RTL
1754 location of the DECLs. If in_lto_p, we can't rely on optimize,
1755 a function could be compiled with -O1 -flto first and only the
1756 link performed at -O0. */
1757 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1758 expand_one_var (var, true, true);
1759 else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
1761 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1762 contain the default def (representing the parm or result itself)
1763 we don't do anything here. But those which don't contain the
1764 default def (representing a temporary based on the parm/result)
1765 we need to allocate space just like for normal VAR_DECLs. */
1766 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1768 expand_one_var (var, true, true);
1769 gcc_assert (SA.partition_to_pseudo[i]);
1774 if (flag_stack_protect == SPCT_FLAG_STRONG)
1775 gen_stack_protect_signal
1776 = stack_protect_decl_p () || stack_protect_return_slot_p ();
1778 /* At this point all variables on the local_decls with TREE_USED
1779 set are not associated with any block scope. Lay them out. */
1781 len = vec_safe_length (cfun->local_decls);
1782 FOR_EACH_LOCAL_DECL (cfun, i, var)
1784 bool expand_now = false;
1786 /* Expanded above already. */
1787 if (is_gimple_reg (var))
1789 TREE_USED (var) = 0;
1790 goto next;
1792 /* We didn't set a block for static or extern because it's hard
1793 to tell the difference between a global variable (re)declared
1794 in a local scope, and one that's really declared there to
1795 begin with. And it doesn't really matter much, since we're
1796 not giving them stack space. Expand them now. */
1797 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1798 expand_now = true;
1800 /* Expand variables not associated with any block now. Those created by
1801 the optimizers could be live anywhere in the function. Those that
1802 could possibly have been scoped originally and detached from their
1803 block will have their allocation deferred so we coalesce them with
1804 others when optimization is enabled. */
1805 else if (TREE_USED (var))
1806 expand_now = true;
1808 /* Finally, mark all variables on the list as used. We'll use
1809 this in a moment when we expand those associated with scopes. */
1810 TREE_USED (var) = 1;
1812 if (expand_now)
1813 expand_one_var (var, true, true);
1815 next:
1816 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1818 rtx rtl = DECL_RTL_IF_SET (var);
1820 /* Keep artificial non-ignored vars in cfun->local_decls
1821 chain until instantiate_decls. */
1822 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1823 add_local_decl (cfun, var);
1824 else if (rtl == NULL_RTX)
1825 /* If rtl isn't set yet, which can happen e.g. with
1826 -fstack-protector, retry before returning from this
1827 function. */
1828 maybe_local_decls.safe_push (var);
1832 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1834 +-----------------+-----------------+
1835 | ...processed... | ...duplicates...|
1836 +-----------------+-----------------+
1838 +-- LEN points here.
1840 We just want the duplicates, as those are the artificial
1841 non-ignored vars that we want to keep until instantiate_decls.
1842 Move them down and truncate the array. */
1843 if (!vec_safe_is_empty (cfun->local_decls))
1844 cfun->local_decls->block_remove (0, len);
1846 /* At this point, all variables within the block tree with TREE_USED
1847 set are actually used by the optimized function. Lay them out. */
1848 expand_used_vars_for_block (outer_block, true);
1850 if (stack_vars_num > 0)
1852 add_scope_conflicts ();
1854 /* If stack protection is enabled, we don't share space between
1855 vulnerable data and non-vulnerable data. */
1856 if (flag_stack_protect != 0
1857 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
1858 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1859 && lookup_attribute ("stack_protect",
1860 DECL_ATTRIBUTES (current_function_decl)))))
1861 add_stack_protection_conflicts ();
1863 /* Now that we have collected all stack variables, and have computed a
1864 minimal interference graph, attempt to save some stack space. */
1865 partition_stack_vars ();
1866 if (dump_file)
1867 dump_stack_var_partition ();
1870 switch (flag_stack_protect)
1872 case SPCT_FLAG_ALL:
1873 create_stack_guard ();
1874 break;
1876 case SPCT_FLAG_STRONG:
1877 if (gen_stack_protect_signal
1878 || cfun->calls_alloca || has_protected_decls
1879 || lookup_attribute ("stack_protect",
1880 DECL_ATTRIBUTES (current_function_decl)))
1881 create_stack_guard ();
1882 break;
1884 case SPCT_FLAG_DEFAULT:
1885 if (cfun->calls_alloca || has_protected_decls
1886 || lookup_attribute ("stack_protect",
1887 DECL_ATTRIBUTES (current_function_decl)))
1888 create_stack_guard ();
1889 break;
1891 case SPCT_FLAG_EXPLICIT:
1892 if (lookup_attribute ("stack_protect",
1893 DECL_ATTRIBUTES (current_function_decl)))
1894 create_stack_guard ();
1895 break;
1896 default:
1900 /* Assign rtl to each variable based on these partitions. */
1901 if (stack_vars_num > 0)
1903 struct stack_vars_data data;
1905 data.asan_vec = vNULL;
1906 data.asan_decl_vec = vNULL;
1907 data.asan_base = NULL_RTX;
1908 data.asan_alignb = 0;
1910 /* Reorder decls to be protected by iterating over the variables
1911 array multiple times, and allocating out of each phase in turn. */
1912 /* ??? We could probably integrate this into the qsort we did
1913 earlier, such that we naturally see these variables first,
1914 and thus naturally allocate things in the right order. */
1915 if (has_protected_decls)
1917 /* Phase 1 contains only character arrays. */
1918 expand_stack_vars (stack_protect_decl_phase_1, &data);
1920 /* Phase 2 contains other kinds of arrays. */
1921 if (flag_stack_protect == SPCT_FLAG_ALL
1922 || flag_stack_protect == SPCT_FLAG_STRONG
1923 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1924 && lookup_attribute ("stack_protect",
1925 DECL_ATTRIBUTES (current_function_decl))))
1926 expand_stack_vars (stack_protect_decl_phase_2, &data);
1929 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
1930 /* Phase 3, any partitions that need asan protection
1931 in addition to phase 1 and 2. */
1932 expand_stack_vars (asan_decl_phase_3, &data);
1934 if (!data.asan_vec.is_empty ())
1936 HOST_WIDE_INT prev_offset = frame_offset;
1937 HOST_WIDE_INT offset, sz, redzonesz;
1938 redzonesz = ASAN_RED_ZONE_SIZE;
1939 sz = data.asan_vec[0] - prev_offset;
1940 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
1941 && data.asan_alignb <= 4096
1942 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
1943 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
1944 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
1945 offset
1946 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
1947 data.asan_vec.safe_push (prev_offset);
1948 data.asan_vec.safe_push (offset);
1949 /* Leave space for alignment if STRICT_ALIGNMENT. */
1950 if (STRICT_ALIGNMENT)
1951 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
1952 << ASAN_SHADOW_SHIFT)
1953 / BITS_PER_UNIT, 1);
1955 var_end_seq
1956 = asan_emit_stack_protection (virtual_stack_vars_rtx,
1957 data.asan_base,
1958 data.asan_alignb,
1959 data.asan_vec.address (),
1960 data.asan_decl_vec.address (),
1961 data.asan_vec.length ());
1964 expand_stack_vars (NULL, &data);
1966 data.asan_vec.release ();
1967 data.asan_decl_vec.release ();
1970 fini_vars_expansion ();
1972 /* If there were any artificial non-ignored vars without rtl
1973 found earlier, see if deferred stack allocation hasn't assigned
1974 rtl to them. */
1975 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
1977 rtx rtl = DECL_RTL_IF_SET (var);
1979 /* Keep artificial non-ignored vars in cfun->local_decls
1980 chain until instantiate_decls. */
1981 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1982 add_local_decl (cfun, var);
1984 maybe_local_decls.release ();
1986 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1987 if (STACK_ALIGNMENT_NEEDED)
1989 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1990 if (!FRAME_GROWS_DOWNWARD)
1991 frame_offset += align - 1;
1992 frame_offset &= -align;
1995 return var_end_seq;
1999 /* If we need to produce a detailed dump, print the tree representation
2000 for STMT to the dump file. SINCE is the last RTX after which the RTL
2001 generated for STMT should have been appended. */
2003 static void
2004 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
2006 if (dump_file && (dump_flags & TDF_DETAILS))
2008 fprintf (dump_file, "\n;; ");
2009 print_gimple_stmt (dump_file, stmt, 0,
2010 TDF_SLIM | (dump_flags & TDF_LINENO));
2011 fprintf (dump_file, "\n");
2013 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2017 /* Maps the blocks that do not contain tree labels to rtx labels. */
2019 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2021 /* Returns the label_rtx expression for a label starting basic block BB. */
2023 static rtx_code_label *
2024 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2026 gimple_stmt_iterator gsi;
2027 tree lab;
2029 if (bb->flags & BB_RTL)
2030 return block_label (bb);
2032 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2033 if (elt)
2034 return *elt;
2036 /* Find the tree label if it is present. */
2038 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2040 glabel *lab_stmt;
2042 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2043 if (!lab_stmt)
2044 break;
2046 lab = gimple_label_label (lab_stmt);
2047 if (DECL_NONLOCAL (lab))
2048 break;
2050 return jump_target_rtx (lab);
2053 rtx_code_label *l = gen_label_rtx ();
2054 lab_rtx_for_bb->put (bb, l);
2055 return l;
2059 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2060 of a basic block where we just expanded the conditional at the end,
2061 possibly clean up the CFG and instruction sequence. LAST is the
2062 last instruction before the just emitted jump sequence. */
2064 static void
2065 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2067 /* Special case: when jumpif decides that the condition is
2068 trivial it emits an unconditional jump (and the necessary
2069 barrier). But we still have two edges, the fallthru one is
2070 wrong. purge_dead_edges would clean this up later. Unfortunately
2071 we have to insert insns (and split edges) before
2072 find_many_sub_basic_blocks and hence before purge_dead_edges.
2073 But splitting edges might create new blocks which depend on the
2074 fact that if there are two edges there's no barrier. So the
2075 barrier would get lost and verify_flow_info would ICE. Instead
2076 of auditing all edge splitters to care for the barrier (which
2077 normally isn't there in a cleaned CFG), fix it here. */
2078 if (BARRIER_P (get_last_insn ()))
2080 rtx_insn *insn;
2081 remove_edge (e);
2082 /* Now, we have a single successor block, if we have insns to
2083 insert on the remaining edge we potentially will insert
2084 it at the end of this block (if the dest block isn't feasible)
2085 in order to avoid splitting the edge. This insertion will take
2086 place in front of the last jump. But we might have emitted
2087 multiple jumps (conditional and one unconditional) to the
2088 same destination. Inserting in front of the last one then
2089 is a problem. See PR 40021. We fix this by deleting all
2090 jumps except the last unconditional one. */
2091 insn = PREV_INSN (get_last_insn ());
2092 /* Make sure we have an unconditional jump. Otherwise we're
2093 confused. */
2094 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2095 for (insn = PREV_INSN (insn); insn != last;)
2097 insn = PREV_INSN (insn);
2098 if (JUMP_P (NEXT_INSN (insn)))
2100 if (!any_condjump_p (NEXT_INSN (insn)))
2102 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2103 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2105 delete_insn (NEXT_INSN (insn));
2111 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2112 Returns a new basic block if we've terminated the current basic
2113 block and created a new one. */
2115 static basic_block
2116 expand_gimple_cond (basic_block bb, gcond *stmt)
2118 basic_block new_bb, dest;
2119 edge new_edge;
2120 edge true_edge;
2121 edge false_edge;
2122 rtx_insn *last2, *last;
2123 enum tree_code code;
2124 tree op0, op1;
2126 code = gimple_cond_code (stmt);
2127 op0 = gimple_cond_lhs (stmt);
2128 op1 = gimple_cond_rhs (stmt);
2129 /* We're sometimes presented with such code:
2130 D.123_1 = x < y;
2131 if (D.123_1 != 0)
2133 This would expand to two comparisons which then later might
2134 be cleaned up by combine. But some pattern matchers like if-conversion
2135 work better when there's only one compare, so make up for this
2136 here as special exception if TER would have made the same change. */
2137 if (SA.values
2138 && TREE_CODE (op0) == SSA_NAME
2139 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2140 && TREE_CODE (op1) == INTEGER_CST
2141 && ((gimple_cond_code (stmt) == NE_EXPR
2142 && integer_zerop (op1))
2143 || (gimple_cond_code (stmt) == EQ_EXPR
2144 && integer_onep (op1)))
2145 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2147 gimple second = SSA_NAME_DEF_STMT (op0);
2148 if (gimple_code (second) == GIMPLE_ASSIGN)
2150 enum tree_code code2 = gimple_assign_rhs_code (second);
2151 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2153 code = code2;
2154 op0 = gimple_assign_rhs1 (second);
2155 op1 = gimple_assign_rhs2 (second);
2157 /* If jumps are cheap and the target does not support conditional
2158 compare, turn some more codes into jumpy sequences. */
2159 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2160 && targetm.gen_ccmp_first == NULL)
2162 if ((code2 == BIT_AND_EXPR
2163 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2164 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2165 || code2 == TRUTH_AND_EXPR)
2167 code = TRUTH_ANDIF_EXPR;
2168 op0 = gimple_assign_rhs1 (second);
2169 op1 = gimple_assign_rhs2 (second);
2171 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2173 code = TRUTH_ORIF_EXPR;
2174 op0 = gimple_assign_rhs1 (second);
2175 op1 = gimple_assign_rhs2 (second);
2181 last2 = last = get_last_insn ();
2183 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2184 set_curr_insn_location (gimple_location (stmt));
2186 /* These flags have no purpose in RTL land. */
2187 true_edge->flags &= ~EDGE_TRUE_VALUE;
2188 false_edge->flags &= ~EDGE_FALSE_VALUE;
2190 /* We can either have a pure conditional jump with one fallthru edge or
2191 two-way jump that needs to be decomposed into two basic blocks. */
2192 if (false_edge->dest == bb->next_bb)
2194 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2195 true_edge->probability);
2196 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2197 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2198 set_curr_insn_location (true_edge->goto_locus);
2199 false_edge->flags |= EDGE_FALLTHRU;
2200 maybe_cleanup_end_of_block (false_edge, last);
2201 return NULL;
2203 if (true_edge->dest == bb->next_bb)
2205 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2206 false_edge->probability);
2207 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2208 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2209 set_curr_insn_location (false_edge->goto_locus);
2210 true_edge->flags |= EDGE_FALLTHRU;
2211 maybe_cleanup_end_of_block (true_edge, last);
2212 return NULL;
2215 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2216 true_edge->probability);
2217 last = get_last_insn ();
2218 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2219 set_curr_insn_location (false_edge->goto_locus);
2220 emit_jump (label_rtx_for_bb (false_edge->dest));
2222 BB_END (bb) = last;
2223 if (BARRIER_P (BB_END (bb)))
2224 BB_END (bb) = PREV_INSN (BB_END (bb));
2225 update_bb_for_insn (bb);
2227 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2228 dest = false_edge->dest;
2229 redirect_edge_succ (false_edge, new_bb);
2230 false_edge->flags |= EDGE_FALLTHRU;
2231 new_bb->count = false_edge->count;
2232 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2233 add_bb_to_loop (new_bb, bb->loop_father);
2234 new_edge = make_edge (new_bb, dest, 0);
2235 new_edge->probability = REG_BR_PROB_BASE;
2236 new_edge->count = new_bb->count;
2237 if (BARRIER_P (BB_END (new_bb)))
2238 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2239 update_bb_for_insn (new_bb);
2241 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2243 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2245 set_curr_insn_location (true_edge->goto_locus);
2246 true_edge->goto_locus = curr_insn_location ();
2249 return new_bb;
2252 /* Mark all calls that can have a transaction restart. */
2254 static void
2255 mark_transaction_restart_calls (gimple stmt)
2257 struct tm_restart_node dummy;
2258 tm_restart_node **slot;
2260 if (!cfun->gimple_df->tm_restart)
2261 return;
2263 dummy.stmt = stmt;
2264 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2265 if (slot)
2267 struct tm_restart_node *n = *slot;
2268 tree list = n->label_or_list;
2269 rtx_insn *insn;
2271 for (insn = next_real_insn (get_last_insn ());
2272 !CALL_P (insn);
2273 insn = next_real_insn (insn))
2274 continue;
2276 if (TREE_CODE (list) == LABEL_DECL)
2277 add_reg_note (insn, REG_TM, label_rtx (list));
2278 else
2279 for (; list ; list = TREE_CHAIN (list))
2280 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2284 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2285 statement STMT. */
2287 static void
2288 expand_call_stmt (gcall *stmt)
2290 tree exp, decl, lhs;
2291 bool builtin_p;
2292 size_t i;
2294 if (gimple_call_internal_p (stmt))
2296 expand_internal_call (stmt);
2297 return;
2300 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2302 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2303 decl = gimple_call_fndecl (stmt);
2304 builtin_p = decl && DECL_BUILT_IN (decl);
2306 /* If this is not a builtin function, the function type through which the
2307 call is made may be different from the type of the function. */
2308 if (!builtin_p)
2309 CALL_EXPR_FN (exp)
2310 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2311 CALL_EXPR_FN (exp));
2313 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2314 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2316 for (i = 0; i < gimple_call_num_args (stmt); i++)
2318 tree arg = gimple_call_arg (stmt, i);
2319 gimple def;
2320 /* TER addresses into arguments of builtin functions so we have a
2321 chance to infer more correct alignment information. See PR39954. */
2322 if (builtin_p
2323 && TREE_CODE (arg) == SSA_NAME
2324 && (def = get_gimple_for_ssa_name (arg))
2325 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2326 arg = gimple_assign_rhs1 (def);
2327 CALL_EXPR_ARG (exp, i) = arg;
2330 if (gimple_has_side_effects (stmt))
2331 TREE_SIDE_EFFECTS (exp) = 1;
2333 if (gimple_call_nothrow_p (stmt))
2334 TREE_NOTHROW (exp) = 1;
2336 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2337 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2338 if (decl
2339 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2340 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2341 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2342 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2343 else
2344 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2345 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2346 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2347 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2349 /* Ensure RTL is created for debug args. */
2350 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2352 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2353 unsigned int ix;
2354 tree dtemp;
2356 if (debug_args)
2357 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2359 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2360 expand_debug_expr (dtemp);
2364 lhs = gimple_call_lhs (stmt);
2365 if (lhs)
2366 expand_assignment (lhs, exp, false);
2367 else
2368 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2370 mark_transaction_restart_calls (stmt);
2374 /* Generate RTL for an asm statement (explicit assembler code).
2375 STRING is a STRING_CST node containing the assembler code text,
2376 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2377 insn is volatile; don't optimize it. */
2379 static void
2380 expand_asm_loc (tree string, int vol, location_t locus)
2382 rtx body;
2384 if (TREE_CODE (string) == ADDR_EXPR)
2385 string = TREE_OPERAND (string, 0);
2387 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2388 ggc_strdup (TREE_STRING_POINTER (string)),
2389 locus);
2391 MEM_VOLATILE_P (body) = vol;
2393 emit_insn (body);
2396 /* Return the number of times character C occurs in string S. */
2397 static int
2398 n_occurrences (int c, const char *s)
2400 int n = 0;
2401 while (*s)
2402 n += (*s++ == c);
2403 return n;
2406 /* A subroutine of expand_asm_operands. Check that all operands have
2407 the same number of alternatives. Return true if so. */
2409 static bool
2410 check_operand_nalternatives (const vec<const char *> &constraints)
2412 unsigned len = constraints.length();
2413 if (len > 0)
2415 int nalternatives = n_occurrences (',', constraints[0]);
2417 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2419 error ("too many alternatives in %<asm%>");
2420 return false;
2423 for (unsigned i = 1; i < len; ++i)
2424 if (n_occurrences (',', constraints[i]) != nalternatives)
2426 error ("operand constraints for %<asm%> differ "
2427 "in number of alternatives");
2428 return false;
2431 return true;
2434 /* Check for overlap between registers marked in CLOBBERED_REGS and
2435 anything inappropriate in T. Emit error and return the register
2436 variable definition for error, NULL_TREE for ok. */
2438 static bool
2439 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2441 /* Conflicts between asm-declared register variables and the clobber
2442 list are not allowed. */
2443 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2445 if (overlap)
2447 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2448 DECL_NAME (overlap));
2450 /* Reset registerness to stop multiple errors emitted for a single
2451 variable. */
2452 DECL_REGISTER (overlap) = 0;
2453 return true;
2456 return false;
2459 /* Generate RTL for an asm statement with arguments.
2460 STRING is the instruction template.
2461 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2462 Each output or input has an expression in the TREE_VALUE and
2463 a tree list in TREE_PURPOSE which in turn contains a constraint
2464 name in TREE_VALUE (or NULL_TREE) and a constraint string
2465 in TREE_PURPOSE.
2466 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2467 that is clobbered by this insn.
2469 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2470 should be the fallthru basic block of the asm goto.
2472 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2473 Some elements of OUTPUTS may be replaced with trees representing temporary
2474 values. The caller should copy those temporary values to the originally
2475 specified lvalues.
2477 VOL nonzero means the insn is volatile; don't optimize it. */
2479 static void
2480 expand_asm_stmt (gasm *stmt)
2482 class save_input_location
2484 location_t old;
2486 public:
2487 explicit save_input_location(location_t where)
2489 old = input_location;
2490 input_location = where;
2493 ~save_input_location()
2495 input_location = old;
2499 location_t locus = gimple_location (stmt);
2501 if (gimple_asm_input_p (stmt))
2503 const char *s = gimple_asm_string (stmt);
2504 tree string = build_string (strlen (s), s);
2505 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2506 return;
2509 /* There are some legacy diagnostics in here, and also avoids a
2510 sixth parameger to targetm.md_asm_adjust. */
2511 save_input_location s_i_l(locus);
2513 unsigned noutputs = gimple_asm_noutputs (stmt);
2514 unsigned ninputs = gimple_asm_ninputs (stmt);
2515 unsigned nlabels = gimple_asm_nlabels (stmt);
2516 unsigned i;
2518 /* ??? Diagnose during gimplification? */
2519 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2521 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2522 return;
2525 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2526 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2527 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2529 /* Copy the gimple vectors into new vectors that we can manipulate. */
2531 output_tvec.safe_grow (noutputs);
2532 input_tvec.safe_grow (ninputs);
2533 constraints.safe_grow (noutputs + ninputs);
2535 for (i = 0; i < noutputs; ++i)
2537 tree t = gimple_asm_output_op (stmt, i);
2538 output_tvec[i] = TREE_VALUE (t);
2539 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2541 for (i = 0; i < ninputs; i++)
2543 tree t = gimple_asm_input_op (stmt, i);
2544 input_tvec[i] = TREE_VALUE (t);
2545 constraints[i + noutputs]
2546 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2549 /* ??? Diagnose during gimplification? */
2550 if (! check_operand_nalternatives (constraints))
2551 return;
2553 /* Count the number of meaningful clobbered registers, ignoring what
2554 we would ignore later. */
2555 auto_vec<rtx> clobber_rvec;
2556 HARD_REG_SET clobbered_regs;
2557 CLEAR_HARD_REG_SET (clobbered_regs);
2559 if (unsigned n = gimple_asm_nclobbers (stmt))
2561 clobber_rvec.reserve (n);
2562 for (i = 0; i < n; i++)
2564 tree t = gimple_asm_clobber_op (stmt, i);
2565 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2566 int nregs, j;
2568 j = decode_reg_name_and_count (regname, &nregs);
2569 if (j < 0)
2571 if (j == -2)
2573 /* ??? Diagnose during gimplification? */
2574 error ("unknown register name %qs in %<asm%>", regname);
2576 else if (j == -4)
2578 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2579 clobber_rvec.safe_push (x);
2581 else
2583 /* Otherwise we should have -1 == empty string
2584 or -3 == cc, which is not a register. */
2585 gcc_assert (j == -1 || j == -3);
2588 else
2589 for (int reg = j; reg < j + nregs; reg++)
2591 /* Clobbering the PIC register is an error. */
2592 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2594 /* ??? Diagnose during gimplification? */
2595 error ("PIC register clobbered by %qs in %<asm%>",
2596 regname);
2597 return;
2600 SET_HARD_REG_BIT (clobbered_regs, reg);
2601 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2602 clobber_rvec.safe_push (x);
2606 unsigned nclobbers = clobber_rvec.length();
2608 /* First pass over inputs and outputs checks validity and sets
2609 mark_addressable if needed. */
2610 /* ??? Diagnose during gimplification? */
2612 for (i = 0; i < noutputs; ++i)
2614 tree val = output_tvec[i];
2615 tree type = TREE_TYPE (val);
2616 const char *constraint;
2617 bool is_inout;
2618 bool allows_reg;
2619 bool allows_mem;
2621 /* Try to parse the output constraint. If that fails, there's
2622 no point in going further. */
2623 constraint = constraints[i];
2624 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2625 &allows_mem, &allows_reg, &is_inout))
2626 return;
2628 if (! allows_reg
2629 && (allows_mem
2630 || is_inout
2631 || (DECL_P (val)
2632 && REG_P (DECL_RTL (val))
2633 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2634 mark_addressable (val);
2637 for (i = 0; i < ninputs; ++i)
2639 bool allows_reg, allows_mem;
2640 const char *constraint;
2642 constraint = constraints[i + noutputs];
2643 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
2644 constraints.address (),
2645 &allows_mem, &allows_reg))
2646 return;
2648 if (! allows_reg && allows_mem)
2649 mark_addressable (input_tvec[i]);
2652 /* Second pass evaluates arguments. */
2654 /* Make sure stack is consistent for asm goto. */
2655 if (nlabels > 0)
2656 do_pending_stack_adjust ();
2657 int old_generating_concat_p = generating_concat_p;
2659 /* Vector of RTX's of evaluated output operands. */
2660 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
2661 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
2662 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
2664 output_rvec.safe_grow (noutputs);
2666 for (i = 0; i < noutputs; ++i)
2668 tree val = output_tvec[i];
2669 tree type = TREE_TYPE (val);
2670 bool is_inout, allows_reg, allows_mem, ok;
2671 rtx op;
2673 ok = parse_output_constraint (&constraints[i], i, ninputs,
2674 noutputs, &allows_mem, &allows_reg,
2675 &is_inout);
2676 gcc_assert (ok);
2678 /* If an output operand is not a decl or indirect ref and our constraint
2679 allows a register, make a temporary to act as an intermediate.
2680 Make the asm insn write into that, then we will copy it to
2681 the real output operand. Likewise for promoted variables. */
2683 generating_concat_p = 0;
2685 if ((TREE_CODE (val) == INDIRECT_REF
2686 && allows_mem)
2687 || (DECL_P (val)
2688 && (allows_mem || REG_P (DECL_RTL (val)))
2689 && ! (REG_P (DECL_RTL (val))
2690 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2691 || ! allows_reg
2692 || is_inout)
2694 op = expand_expr (val, NULL_RTX, VOIDmode,
2695 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2696 if (MEM_P (op))
2697 op = validize_mem (op);
2699 if (! allows_reg && !MEM_P (op))
2700 error ("output number %d not directly addressable", i);
2701 if ((! allows_mem && MEM_P (op))
2702 || GET_CODE (op) == CONCAT)
2704 rtx old_op = op;
2705 op = gen_reg_rtx (GET_MODE (op));
2707 generating_concat_p = old_generating_concat_p;
2709 if (is_inout)
2710 emit_move_insn (op, old_op);
2712 push_to_sequence2 (after_rtl_seq, after_rtl_end);
2713 emit_move_insn (old_op, op);
2714 after_rtl_seq = get_insns ();
2715 after_rtl_end = get_last_insn ();
2716 end_sequence ();
2719 else
2721 op = assign_temp (type, 0, 1);
2722 op = validize_mem (op);
2723 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
2724 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
2726 generating_concat_p = old_generating_concat_p;
2728 push_to_sequence2 (after_rtl_seq, after_rtl_end);
2729 expand_assignment (val, make_tree (type, op), false);
2730 after_rtl_seq = get_insns ();
2731 after_rtl_end = get_last_insn ();
2732 end_sequence ();
2734 output_rvec[i] = op;
2736 if (is_inout)
2737 inout_opnum.safe_push (i);
2740 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
2741 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
2743 input_rvec.safe_grow (ninputs);
2744 input_mode.safe_grow (ninputs);
2746 generating_concat_p = 0;
2748 for (i = 0; i < ninputs; ++i)
2750 tree val = input_tvec[i];
2751 tree type = TREE_TYPE (val);
2752 bool allows_reg, allows_mem, ok;
2753 const char *constraint;
2754 rtx op;
2756 constraint = constraints[i + noutputs];
2757 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
2758 constraints.address (),
2759 &allows_mem, &allows_reg);
2760 gcc_assert (ok);
2762 /* EXPAND_INITIALIZER will not generate code for valid initializer
2763 constants, but will still generate code for other types of operand.
2764 This is the behavior we want for constant constraints. */
2765 op = expand_expr (val, NULL_RTX, VOIDmode,
2766 allows_reg ? EXPAND_NORMAL
2767 : allows_mem ? EXPAND_MEMORY
2768 : EXPAND_INITIALIZER);
2770 /* Never pass a CONCAT to an ASM. */
2771 if (GET_CODE (op) == CONCAT)
2772 op = force_reg (GET_MODE (op), op);
2773 else if (MEM_P (op))
2774 op = validize_mem (op);
2776 if (asm_operand_ok (op, constraint, NULL) <= 0)
2778 if (allows_reg && TYPE_MODE (type) != BLKmode)
2779 op = force_reg (TYPE_MODE (type), op);
2780 else if (!allows_mem)
2781 warning (0, "asm operand %d probably doesn%'t match constraints",
2782 i + noutputs);
2783 else if (MEM_P (op))
2785 /* We won't recognize either volatile memory or memory
2786 with a queued address as available a memory_operand
2787 at this point. Ignore it: clearly this *is* a memory. */
2789 else
2790 gcc_unreachable ();
2792 input_rvec[i] = op;
2793 input_mode[i] = TYPE_MODE (type);
2796 /* For in-out operands, copy output rtx to input rtx. */
2797 unsigned ninout = inout_opnum.length();
2798 for (i = 0; i < ninout; i++)
2800 int j = inout_opnum[i];
2801 rtx o = output_rvec[j];
2803 input_rvec.safe_push (o);
2804 input_mode.safe_push (GET_MODE (o));
2806 char buffer[16];
2807 sprintf (buffer, "%d", j);
2808 constraints.safe_push (ggc_strdup (buffer));
2810 ninputs += ninout;
2812 /* Sometimes we wish to automatically clobber registers across an asm.
2813 Case in point is when the i386 backend moved from cc0 to a hard reg --
2814 maintaining source-level compatibility means automatically clobbering
2815 the flags register. */
2816 rtx_insn *after_md_seq = NULL;
2817 if (targetm.md_asm_adjust)
2818 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
2819 constraints, clobber_rvec,
2820 clobbered_regs);
2822 /* Do not allow the hook to change the output and input count,
2823 lest it mess up the operand numbering. */
2824 gcc_assert (output_rvec.length() == noutputs);
2825 gcc_assert (input_rvec.length() == ninputs);
2826 gcc_assert (constraints.length() == noutputs + ninputs);
2828 /* But it certainly can adjust the clobbers. */
2829 nclobbers = clobber_rvec.length();
2831 /* Third pass checks for easy conflicts. */
2832 /* ??? Why are we doing this on trees instead of rtx. */
2834 bool clobber_conflict_found = 0;
2835 for (i = 0; i < noutputs; ++i)
2836 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
2837 clobber_conflict_found = 1;
2838 for (i = 0; i < ninputs - ninout; ++i)
2839 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
2840 clobber_conflict_found = 1;
2842 /* Make vectors for the expression-rtx, constraint strings,
2843 and named operands. */
2845 rtvec argvec = rtvec_alloc (ninputs);
2846 rtvec constraintvec = rtvec_alloc (ninputs);
2847 rtvec labelvec = rtvec_alloc (nlabels);
2849 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2850 : GET_MODE (output_rvec[0])),
2851 ggc_strdup (gimple_asm_string (stmt)),
2852 empty_string, 0, argvec, constraintvec,
2853 labelvec, locus);
2854 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
2856 for (i = 0; i < ninputs; ++i)
2858 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
2859 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2860 = gen_rtx_ASM_INPUT_loc (input_mode[i],
2861 constraints[i + noutputs],
2862 locus);
2865 /* Copy labels to the vector. */
2866 rtx_code_label *fallthru_label = NULL;
2867 if (nlabels > 0)
2869 basic_block fallthru_bb = NULL;
2870 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
2871 if (fallthru)
2872 fallthru_bb = fallthru->dest;
2874 for (i = 0; i < nlabels; ++i)
2876 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
2877 rtx_insn *r;
2878 /* If asm goto has any labels in the fallthru basic block, use
2879 a label that we emit immediately after the asm goto. Expansion
2880 may insert further instructions into the same basic block after
2881 asm goto and if we don't do this, insertion of instructions on
2882 the fallthru edge might misbehave. See PR58670. */
2883 if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
2885 if (fallthru_label == NULL_RTX)
2886 fallthru_label = gen_label_rtx ();
2887 r = fallthru_label;
2889 else
2890 r = label_rtx (label);
2891 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2895 /* Now, for each output, construct an rtx
2896 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2897 ARGVEC CONSTRAINTS OPNAMES))
2898 If there is more than one, put them inside a PARALLEL. */
2900 if (nlabels > 0 && nclobbers == 0)
2902 gcc_assert (noutputs == 0);
2903 emit_jump_insn (body);
2905 else if (noutputs == 0 && nclobbers == 0)
2907 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2908 emit_insn (body);
2910 else if (noutputs == 1 && nclobbers == 0)
2912 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
2913 emit_insn (gen_rtx_SET (output_rvec[0], body));
2915 else
2917 rtx obody = body;
2918 int num = noutputs;
2920 if (num == 0)
2921 num = 1;
2923 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2925 /* For each output operand, store a SET. */
2926 for (i = 0; i < noutputs; ++i)
2928 rtx src, o = output_rvec[i];
2929 if (i == 0)
2931 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
2932 src = obody;
2934 else
2936 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
2937 ASM_OPERANDS_TEMPLATE (obody),
2938 constraints[i], i, argvec,
2939 constraintvec, labelvec, locus);
2940 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
2942 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
2945 /* If there are no outputs (but there are some clobbers)
2946 store the bare ASM_OPERANDS into the PARALLEL. */
2947 if (i == 0)
2948 XVECEXP (body, 0, i++) = obody;
2950 /* Store (clobber REG) for each clobbered register specified. */
2951 for (unsigned j = 0; j < nclobbers; ++j)
2953 rtx clobbered_reg = clobber_rvec[j];
2955 /* Do sanity check for overlap between clobbers and respectively
2956 input and outputs that hasn't been handled. Such overlap
2957 should have been detected and reported above. */
2958 if (!clobber_conflict_found && REG_P (clobbered_reg))
2960 /* We test the old body (obody) contents to avoid
2961 tripping over the under-construction body. */
2962 for (unsigned k = 0; k < noutputs; ++k)
2963 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
2964 internal_error ("asm clobber conflict with output operand");
2966 for (unsigned k = 0; k < ninputs - ninout; ++k)
2967 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
2968 internal_error ("asm clobber conflict with input operand");
2971 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2974 if (nlabels > 0)
2975 emit_jump_insn (body);
2976 else
2977 emit_insn (body);
2980 generating_concat_p = old_generating_concat_p;
2982 if (fallthru_label)
2983 emit_label (fallthru_label);
2985 if (after_md_seq)
2986 emit_insn (after_md_seq);
2987 if (after_rtl_seq)
2988 emit_insn (after_rtl_seq);
2990 free_temp_slots ();
2991 crtl->has_asm_statement = 1;
2994 /* Emit code to jump to the address
2995 specified by the pointer expression EXP. */
2997 static void
2998 expand_computed_goto (tree exp)
3000 rtx x = expand_normal (exp);
3002 do_pending_stack_adjust ();
3003 emit_indirect_jump (x);
3006 /* Generate RTL code for a `goto' statement with target label LABEL.
3007 LABEL should be a LABEL_DECL tree node that was or will later be
3008 defined with `expand_label'. */
3010 static void
3011 expand_goto (tree label)
3013 #ifdef ENABLE_CHECKING
3014 /* Check for a nonlocal goto to a containing function. Should have
3015 gotten translated to __builtin_nonlocal_goto. */
3016 tree context = decl_function_context (label);
3017 gcc_assert (!context || context == current_function_decl);
3018 #endif
3020 emit_jump (jump_target_rtx (label));
3023 /* Output a return with no value. */
3025 static void
3026 expand_null_return_1 (void)
3028 clear_pending_stack_adjust ();
3029 do_pending_stack_adjust ();
3030 emit_jump (return_label);
3033 /* Generate RTL to return from the current function, with no value.
3034 (That is, we do not do anything about returning any value.) */
3036 void
3037 expand_null_return (void)
3039 /* If this function was declared to return a value, but we
3040 didn't, clobber the return registers so that they are not
3041 propagated live to the rest of the function. */
3042 clobber_return_register ();
3044 expand_null_return_1 ();
3047 /* Generate RTL to return from the current function, with value VAL. */
3049 static void
3050 expand_value_return (rtx val)
3052 /* Copy the value to the return location unless it's already there. */
3054 tree decl = DECL_RESULT (current_function_decl);
3055 rtx return_reg = DECL_RTL (decl);
3056 if (return_reg != val)
3058 tree funtype = TREE_TYPE (current_function_decl);
3059 tree type = TREE_TYPE (decl);
3060 int unsignedp = TYPE_UNSIGNED (type);
3061 machine_mode old_mode = DECL_MODE (decl);
3062 machine_mode mode;
3063 if (DECL_BY_REFERENCE (decl))
3064 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3065 else
3066 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3068 if (mode != old_mode)
3069 val = convert_modes (mode, old_mode, val, unsignedp);
3071 if (GET_CODE (return_reg) == PARALLEL)
3072 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3073 else
3074 emit_move_insn (return_reg, val);
3077 expand_null_return_1 ();
3080 /* Generate RTL to evaluate the expression RETVAL and return it
3081 from the current function. */
3083 static void
3084 expand_return (tree retval, tree bounds)
3086 rtx result_rtl;
3087 rtx val = 0;
3088 tree retval_rhs;
3089 rtx bounds_rtl;
3091 /* If function wants no value, give it none. */
3092 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3094 expand_normal (retval);
3095 expand_null_return ();
3096 return;
3099 if (retval == error_mark_node)
3101 /* Treat this like a return of no value from a function that
3102 returns a value. */
3103 expand_null_return ();
3104 return;
3106 else if ((TREE_CODE (retval) == MODIFY_EXPR
3107 || TREE_CODE (retval) == INIT_EXPR)
3108 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3109 retval_rhs = TREE_OPERAND (retval, 1);
3110 else
3111 retval_rhs = retval;
3113 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3115 /* Put returned bounds to the right place. */
3116 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3117 if (bounds_rtl)
3119 rtx addr = NULL;
3120 rtx bnd = NULL;
3122 if (bounds && bounds != error_mark_node)
3124 bnd = expand_normal (bounds);
3125 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3127 else if (REG_P (bounds_rtl))
3129 if (bounds)
3130 bnd = chkp_expand_zero_bounds ();
3131 else
3133 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3134 addr = gen_rtx_MEM (Pmode, addr);
3135 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3138 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3140 else
3142 int n;
3144 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3146 if (bounds)
3147 bnd = chkp_expand_zero_bounds ();
3148 else
3150 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3151 addr = gen_rtx_MEM (Pmode, addr);
3154 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3156 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3157 if (!bounds)
3159 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3160 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3161 bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3163 targetm.calls.store_returned_bounds (slot, bnd);
3167 else if (chkp_function_instrumented_p (current_function_decl)
3168 && !BOUNDED_P (retval_rhs)
3169 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3170 && TREE_CODE (retval_rhs) != RESULT_DECL)
3172 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3173 addr = gen_rtx_MEM (Pmode, addr);
3175 gcc_assert (MEM_P (result_rtl));
3177 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3180 /* If we are returning the RESULT_DECL, then the value has already
3181 been stored into it, so we don't have to do anything special. */
3182 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3183 expand_value_return (result_rtl);
3185 /* If the result is an aggregate that is being returned in one (or more)
3186 registers, load the registers here. */
3188 else if (retval_rhs != 0
3189 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3190 && REG_P (result_rtl))
3192 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3193 if (val)
3195 /* Use the mode of the result value on the return register. */
3196 PUT_MODE (result_rtl, GET_MODE (val));
3197 expand_value_return (val);
3199 else
3200 expand_null_return ();
3202 else if (retval_rhs != 0
3203 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3204 && (REG_P (result_rtl)
3205 || (GET_CODE (result_rtl) == PARALLEL)))
3207 /* Compute the return value into a temporary (usually a pseudo reg). */
3209 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3210 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3211 val = force_not_mem (val);
3212 expand_value_return (val);
3214 else
3216 /* No hard reg used; calculate value into hard return reg. */
3217 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3218 expand_value_return (result_rtl);
3222 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3223 STMT that doesn't require special handling for outgoing edges. That
3224 is no tailcalls and no GIMPLE_COND. */
3226 static void
3227 expand_gimple_stmt_1 (gimple stmt)
3229 tree op0;
3231 set_curr_insn_location (gimple_location (stmt));
3233 switch (gimple_code (stmt))
3235 case GIMPLE_GOTO:
3236 op0 = gimple_goto_dest (stmt);
3237 if (TREE_CODE (op0) == LABEL_DECL)
3238 expand_goto (op0);
3239 else
3240 expand_computed_goto (op0);
3241 break;
3242 case GIMPLE_LABEL:
3243 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3244 break;
3245 case GIMPLE_NOP:
3246 case GIMPLE_PREDICT:
3247 break;
3248 case GIMPLE_SWITCH:
3249 expand_case (as_a <gswitch *> (stmt));
3250 break;
3251 case GIMPLE_ASM:
3252 expand_asm_stmt (as_a <gasm *> (stmt));
3253 break;
3254 case GIMPLE_CALL:
3255 expand_call_stmt (as_a <gcall *> (stmt));
3256 break;
3258 case GIMPLE_RETURN:
3260 tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3261 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3263 if (op0 && op0 != error_mark_node)
3265 tree result = DECL_RESULT (current_function_decl);
3267 /* If we are not returning the current function's RESULT_DECL,
3268 build an assignment to it. */
3269 if (op0 != result)
3271 /* I believe that a function's RESULT_DECL is unique. */
3272 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3274 /* ??? We'd like to use simply expand_assignment here,
3275 but this fails if the value is of BLKmode but the return
3276 decl is a register. expand_return has special handling
3277 for this combination, which eventually should move
3278 to common code. See comments there. Until then, let's
3279 build a modify expression :-/ */
3280 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3281 result, op0);
3283 /* Mark we have return statement with missing bounds. */
3284 if (!bnd && chkp_function_instrumented_p (cfun->decl))
3285 bnd = error_mark_node;
3288 if (!op0)
3289 expand_null_return ();
3290 else
3291 expand_return (op0, bnd);
3293 break;
3295 case GIMPLE_ASSIGN:
3297 gassign *assign_stmt = as_a <gassign *> (stmt);
3298 tree lhs = gimple_assign_lhs (assign_stmt);
3300 /* Tree expand used to fiddle with |= and &= of two bitfield
3301 COMPONENT_REFs here. This can't happen with gimple, the LHS
3302 of binary assigns must be a gimple reg. */
3304 if (TREE_CODE (lhs) != SSA_NAME
3305 || get_gimple_rhs_class (gimple_expr_code (stmt))
3306 == GIMPLE_SINGLE_RHS)
3308 tree rhs = gimple_assign_rhs1 (assign_stmt);
3309 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3310 == GIMPLE_SINGLE_RHS);
3311 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3312 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3313 if (TREE_CLOBBER_P (rhs))
3314 /* This is a clobber to mark the going out of scope for
3315 this LHS. */
3317 else
3318 expand_assignment (lhs, rhs,
3319 gimple_assign_nontemporal_move_p (
3320 assign_stmt));
3322 else
3324 rtx target, temp;
3325 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3326 struct separate_ops ops;
3327 bool promoted = false;
3329 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3330 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3331 promoted = true;
3333 ops.code = gimple_assign_rhs_code (assign_stmt);
3334 ops.type = TREE_TYPE (lhs);
3335 switch (get_gimple_rhs_class (ops.code))
3337 case GIMPLE_TERNARY_RHS:
3338 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3339 /* Fallthru */
3340 case GIMPLE_BINARY_RHS:
3341 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3342 /* Fallthru */
3343 case GIMPLE_UNARY_RHS:
3344 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3345 break;
3346 default:
3347 gcc_unreachable ();
3349 ops.location = gimple_location (stmt);
3351 /* If we want to use a nontemporal store, force the value to
3352 register first. If we store into a promoted register,
3353 don't directly expand to target. */
3354 temp = nontemporal || promoted ? NULL_RTX : target;
3355 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3356 EXPAND_NORMAL);
3358 if (temp == target)
3360 else if (promoted)
3362 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3363 /* If TEMP is a VOIDmode constant, use convert_modes to make
3364 sure that we properly convert it. */
3365 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3367 temp = convert_modes (GET_MODE (target),
3368 TYPE_MODE (ops.type),
3369 temp, unsignedp);
3370 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3371 GET_MODE (target), temp, unsignedp);
3374 convert_move (SUBREG_REG (target), temp, unsignedp);
3376 else if (nontemporal && emit_storent_insn (target, temp))
3378 else
3380 temp = force_operand (temp, target);
3381 if (temp != target)
3382 emit_move_insn (target, temp);
3386 break;
3388 default:
3389 gcc_unreachable ();
3393 /* Expand one gimple statement STMT and return the last RTL instruction
3394 before any of the newly generated ones.
3396 In addition to generating the necessary RTL instructions this also
3397 sets REG_EH_REGION notes if necessary and sets the current source
3398 location for diagnostics. */
3400 static rtx_insn *
3401 expand_gimple_stmt (gimple stmt)
3403 location_t saved_location = input_location;
3404 rtx_insn *last = get_last_insn ();
3405 int lp_nr;
3407 gcc_assert (cfun);
3409 /* We need to save and restore the current source location so that errors
3410 discovered during expansion are emitted with the right location. But
3411 it would be better if the diagnostic routines used the source location
3412 embedded in the tree nodes rather than globals. */
3413 if (gimple_has_location (stmt))
3414 input_location = gimple_location (stmt);
3416 expand_gimple_stmt_1 (stmt);
3418 /* Free any temporaries used to evaluate this statement. */
3419 free_temp_slots ();
3421 input_location = saved_location;
3423 /* Mark all insns that may trap. */
3424 lp_nr = lookup_stmt_eh_lp (stmt);
3425 if (lp_nr)
3427 rtx_insn *insn;
3428 for (insn = next_real_insn (last); insn;
3429 insn = next_real_insn (insn))
3431 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3432 /* If we want exceptions for non-call insns, any
3433 may_trap_p instruction may throw. */
3434 && GET_CODE (PATTERN (insn)) != CLOBBER
3435 && GET_CODE (PATTERN (insn)) != USE
3436 && insn_could_throw_p (insn))
3437 make_reg_eh_region_note (insn, 0, lp_nr);
3441 return last;
3444 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3445 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3446 generated a tail call (something that might be denied by the ABI
3447 rules governing the call; see calls.c).
3449 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3450 can still reach the rest of BB. The case here is __builtin_sqrt,
3451 where the NaN result goes through the external function (with a
3452 tailcall) and the normal result happens via a sqrt instruction. */
3454 static basic_block
3455 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3457 rtx_insn *last2, *last;
3458 edge e;
3459 edge_iterator ei;
3460 int probability;
3461 gcov_type count;
3463 last2 = last = expand_gimple_stmt (stmt);
3465 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3466 if (CALL_P (last) && SIBLING_CALL_P (last))
3467 goto found;
3469 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3471 *can_fallthru = true;
3472 return NULL;
3474 found:
3475 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3476 Any instructions emitted here are about to be deleted. */
3477 do_pending_stack_adjust ();
3479 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3480 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3481 EH or abnormal edges, we shouldn't have created a tail call in
3482 the first place. So it seems to me we should just be removing
3483 all edges here, or redirecting the existing fallthru edge to
3484 the exit block. */
3486 probability = 0;
3487 count = 0;
3489 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3491 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3493 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3495 e->dest->count -= e->count;
3496 e->dest->frequency -= EDGE_FREQUENCY (e);
3497 if (e->dest->count < 0)
3498 e->dest->count = 0;
3499 if (e->dest->frequency < 0)
3500 e->dest->frequency = 0;
3502 count += e->count;
3503 probability += e->probability;
3504 remove_edge (e);
3506 else
3507 ei_next (&ei);
3510 /* This is somewhat ugly: the call_expr expander often emits instructions
3511 after the sibcall (to perform the function return). These confuse the
3512 find_many_sub_basic_blocks code, so we need to get rid of these. */
3513 last = NEXT_INSN (last);
3514 gcc_assert (BARRIER_P (last));
3516 *can_fallthru = false;
3517 while (NEXT_INSN (last))
3519 /* For instance an sqrt builtin expander expands if with
3520 sibcall in the then and label for `else`. */
3521 if (LABEL_P (NEXT_INSN (last)))
3523 *can_fallthru = true;
3524 break;
3526 delete_insn (NEXT_INSN (last));
3529 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3530 | EDGE_SIBCALL);
3531 e->probability += probability;
3532 e->count += count;
3533 BB_END (bb) = last;
3534 update_bb_for_insn (bb);
3536 if (NEXT_INSN (last))
3538 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3540 last = BB_END (bb);
3541 if (BARRIER_P (last))
3542 BB_END (bb) = PREV_INSN (last);
3545 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3547 return bb;
3550 /* Return the difference between the floor and the truncated result of
3551 a signed division by OP1 with remainder MOD. */
3552 static rtx
3553 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3555 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3556 return gen_rtx_IF_THEN_ELSE
3557 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3558 gen_rtx_IF_THEN_ELSE
3559 (mode, gen_rtx_LT (BImode,
3560 gen_rtx_DIV (mode, op1, mod),
3561 const0_rtx),
3562 constm1_rtx, const0_rtx),
3563 const0_rtx);
3566 /* Return the difference between the ceil and the truncated result of
3567 a signed division by OP1 with remainder MOD. */
3568 static rtx
3569 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3571 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3572 return gen_rtx_IF_THEN_ELSE
3573 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3574 gen_rtx_IF_THEN_ELSE
3575 (mode, gen_rtx_GT (BImode,
3576 gen_rtx_DIV (mode, op1, mod),
3577 const0_rtx),
3578 const1_rtx, const0_rtx),
3579 const0_rtx);
3582 /* Return the difference between the ceil and the truncated result of
3583 an unsigned division by OP1 with remainder MOD. */
3584 static rtx
3585 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3587 /* (mod != 0 ? 1 : 0) */
3588 return gen_rtx_IF_THEN_ELSE
3589 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3590 const1_rtx, const0_rtx);
3593 /* Return the difference between the rounded and the truncated result
3594 of a signed division by OP1 with remainder MOD. Halfway cases are
3595 rounded away from zero, rather than to the nearest even number. */
3596 static rtx
3597 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3599 /* (abs (mod) >= abs (op1) - abs (mod)
3600 ? (op1 / mod > 0 ? 1 : -1)
3601 : 0) */
3602 return gen_rtx_IF_THEN_ELSE
3603 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3604 gen_rtx_MINUS (mode,
3605 gen_rtx_ABS (mode, op1),
3606 gen_rtx_ABS (mode, mod))),
3607 gen_rtx_IF_THEN_ELSE
3608 (mode, gen_rtx_GT (BImode,
3609 gen_rtx_DIV (mode, op1, mod),
3610 const0_rtx),
3611 const1_rtx, constm1_rtx),
3612 const0_rtx);
3615 /* Return the difference between the rounded and the truncated result
3616 of a unsigned division by OP1 with remainder MOD. Halfway cases
3617 are rounded away from zero, rather than to the nearest even
3618 number. */
3619 static rtx
3620 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3622 /* (mod >= op1 - mod ? 1 : 0) */
3623 return gen_rtx_IF_THEN_ELSE
3624 (mode, gen_rtx_GE (BImode, mod,
3625 gen_rtx_MINUS (mode, op1, mod)),
3626 const1_rtx, const0_rtx);
3629 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
3630 any rtl. */
3632 static rtx
3633 convert_debug_memory_address (machine_mode mode, rtx x,
3634 addr_space_t as)
3636 machine_mode xmode = GET_MODE (x);
3638 #ifndef POINTERS_EXTEND_UNSIGNED
3639 gcc_assert (mode == Pmode
3640 || mode == targetm.addr_space.address_mode (as));
3641 gcc_assert (xmode == mode || xmode == VOIDmode);
3642 #else
3643 rtx temp;
3645 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3647 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3648 return x;
3650 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3651 x = simplify_gen_subreg (mode, x, xmode,
3652 subreg_lowpart_offset
3653 (mode, xmode));
3654 else if (POINTERS_EXTEND_UNSIGNED > 0)
3655 x = gen_rtx_ZERO_EXTEND (mode, x);
3656 else if (!POINTERS_EXTEND_UNSIGNED)
3657 x = gen_rtx_SIGN_EXTEND (mode, x);
3658 else
3660 switch (GET_CODE (x))
3662 case SUBREG:
3663 if ((SUBREG_PROMOTED_VAR_P (x)
3664 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3665 || (GET_CODE (SUBREG_REG (x)) == PLUS
3666 && REG_P (XEXP (SUBREG_REG (x), 0))
3667 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3668 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3669 && GET_MODE (SUBREG_REG (x)) == mode)
3670 return SUBREG_REG (x);
3671 break;
3672 case LABEL_REF:
3673 temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
3674 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3675 return temp;
3676 case SYMBOL_REF:
3677 temp = shallow_copy_rtx (x);
3678 PUT_MODE (temp, mode);
3679 return temp;
3680 case CONST:
3681 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3682 if (temp)
3683 temp = gen_rtx_CONST (mode, temp);
3684 return temp;
3685 case PLUS:
3686 case MINUS:
3687 if (CONST_INT_P (XEXP (x, 1)))
3689 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3690 if (temp)
3691 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3693 break;
3694 default:
3695 break;
3697 /* Don't know how to express ptr_extend as operation in debug info. */
3698 return NULL;
3700 #endif /* POINTERS_EXTEND_UNSIGNED */
3702 return x;
3705 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
3706 by avoid_deep_ter_for_debug. */
3708 static hash_map<tree, tree> *deep_ter_debug_map;
3710 /* Split too deep TER chains for debug stmts using debug temporaries. */
3712 static void
3713 avoid_deep_ter_for_debug (gimple stmt, int depth)
3715 use_operand_p use_p;
3716 ssa_op_iter iter;
3717 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
3719 tree use = USE_FROM_PTR (use_p);
3720 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
3721 continue;
3722 gimple g = get_gimple_for_ssa_name (use);
3723 if (g == NULL)
3724 continue;
3725 if (depth > 6 && !stmt_ends_bb_p (g))
3727 if (deep_ter_debug_map == NULL)
3728 deep_ter_debug_map = new hash_map<tree, tree>;
3730 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
3731 if (vexpr != NULL)
3732 continue;
3733 vexpr = make_node (DEBUG_EXPR_DECL);
3734 gimple def_temp = gimple_build_debug_bind (vexpr, use, g);
3735 DECL_ARTIFICIAL (vexpr) = 1;
3736 TREE_TYPE (vexpr) = TREE_TYPE (use);
3737 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
3738 gimple_stmt_iterator gsi = gsi_for_stmt (g);
3739 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
3740 avoid_deep_ter_for_debug (def_temp, 0);
3742 else
3743 avoid_deep_ter_for_debug (g, depth + 1);
3747 /* Return an RTX equivalent to the value of the parameter DECL. */
3749 static rtx
3750 expand_debug_parm_decl (tree decl)
3752 rtx incoming = DECL_INCOMING_RTL (decl);
3754 if (incoming
3755 && GET_MODE (incoming) != BLKmode
3756 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3757 || (MEM_P (incoming)
3758 && REG_P (XEXP (incoming, 0))
3759 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3761 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3763 #ifdef HAVE_window_save
3764 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3765 If the target machine has an explicit window save instruction, the
3766 actual entry value is the corresponding OUTGOING_REGNO instead. */
3767 if (REG_P (incoming)
3768 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3769 incoming
3770 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3771 OUTGOING_REGNO (REGNO (incoming)), 0);
3772 else if (MEM_P (incoming))
3774 rtx reg = XEXP (incoming, 0);
3775 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3777 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3778 incoming = replace_equiv_address_nv (incoming, reg);
3780 else
3781 incoming = copy_rtx (incoming);
3783 #endif
3785 ENTRY_VALUE_EXP (rtl) = incoming;
3786 return rtl;
3789 if (incoming
3790 && GET_MODE (incoming) != BLKmode
3791 && !TREE_ADDRESSABLE (decl)
3792 && MEM_P (incoming)
3793 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3794 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3795 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3796 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3797 return copy_rtx (incoming);
3799 return NULL_RTX;
3802 /* Return an RTX equivalent to the value of the tree expression EXP. */
3804 static rtx
3805 expand_debug_expr (tree exp)
3807 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3808 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3809 machine_mode inner_mode = VOIDmode;
3810 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3811 addr_space_t as;
3813 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3815 case tcc_expression:
3816 switch (TREE_CODE (exp))
3818 case COND_EXPR:
3819 case DOT_PROD_EXPR:
3820 case SAD_EXPR:
3821 case WIDEN_MULT_PLUS_EXPR:
3822 case WIDEN_MULT_MINUS_EXPR:
3823 case FMA_EXPR:
3824 goto ternary;
3826 case TRUTH_ANDIF_EXPR:
3827 case TRUTH_ORIF_EXPR:
3828 case TRUTH_AND_EXPR:
3829 case TRUTH_OR_EXPR:
3830 case TRUTH_XOR_EXPR:
3831 goto binary;
3833 case TRUTH_NOT_EXPR:
3834 goto unary;
3836 default:
3837 break;
3839 break;
3841 ternary:
3842 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3843 if (!op2)
3844 return NULL_RTX;
3845 /* Fall through. */
3847 binary:
3848 case tcc_binary:
3849 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3850 if (!op1)
3851 return NULL_RTX;
3852 switch (TREE_CODE (exp))
3854 case LSHIFT_EXPR:
3855 case RSHIFT_EXPR:
3856 case LROTATE_EXPR:
3857 case RROTATE_EXPR:
3858 case WIDEN_LSHIFT_EXPR:
3859 /* Ensure second operand isn't wider than the first one. */
3860 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
3861 if (SCALAR_INT_MODE_P (inner_mode))
3863 machine_mode opmode = mode;
3864 if (VECTOR_MODE_P (mode))
3865 opmode = GET_MODE_INNER (mode);
3866 if (SCALAR_INT_MODE_P (opmode)
3867 && (GET_MODE_PRECISION (opmode)
3868 < GET_MODE_PRECISION (inner_mode)))
3869 op1 = simplify_gen_subreg (opmode, op1, inner_mode,
3870 subreg_lowpart_offset (opmode,
3871 inner_mode));
3873 break;
3874 default:
3875 break;
3877 /* Fall through. */
3879 unary:
3880 case tcc_unary:
3881 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3882 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3883 if (!op0)
3884 return NULL_RTX;
3885 break;
3887 case tcc_comparison:
3888 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
3889 goto binary;
3891 case tcc_type:
3892 case tcc_statement:
3893 gcc_unreachable ();
3895 case tcc_constant:
3896 case tcc_exceptional:
3897 case tcc_declaration:
3898 case tcc_reference:
3899 case tcc_vl_exp:
3900 break;
3903 switch (TREE_CODE (exp))
3905 case STRING_CST:
3906 if (!lookup_constant_def (exp))
3908 if (strlen (TREE_STRING_POINTER (exp)) + 1
3909 != (size_t) TREE_STRING_LENGTH (exp))
3910 return NULL_RTX;
3911 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3912 op0 = gen_rtx_MEM (BLKmode, op0);
3913 set_mem_attributes (op0, exp, 0);
3914 return op0;
3916 /* Fall through... */
3918 case INTEGER_CST:
3919 case REAL_CST:
3920 case FIXED_CST:
3921 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3922 return op0;
3924 case COMPLEX_CST:
3925 gcc_assert (COMPLEX_MODE_P (mode));
3926 op0 = expand_debug_expr (TREE_REALPART (exp));
3927 op1 = expand_debug_expr (TREE_IMAGPART (exp));
3928 return gen_rtx_CONCAT (mode, op0, op1);
3930 case DEBUG_EXPR_DECL:
3931 op0 = DECL_RTL_IF_SET (exp);
3933 if (op0)
3934 return op0;
3936 op0 = gen_rtx_DEBUG_EXPR (mode);
3937 DEBUG_EXPR_TREE_DECL (op0) = exp;
3938 SET_DECL_RTL (exp, op0);
3940 return op0;
3942 case VAR_DECL:
3943 case PARM_DECL:
3944 case FUNCTION_DECL:
3945 case LABEL_DECL:
3946 case CONST_DECL:
3947 case RESULT_DECL:
3948 op0 = DECL_RTL_IF_SET (exp);
3950 /* This decl was probably optimized away. */
3951 if (!op0)
3953 if (TREE_CODE (exp) != VAR_DECL
3954 || DECL_EXTERNAL (exp)
3955 || !TREE_STATIC (exp)
3956 || !DECL_NAME (exp)
3957 || DECL_HARD_REGISTER (exp)
3958 || DECL_IN_CONSTANT_POOL (exp)
3959 || mode == VOIDmode)
3960 return NULL;
3962 op0 = make_decl_rtl_for_debug (exp);
3963 if (!MEM_P (op0)
3964 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
3965 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
3966 return NULL;
3968 else
3969 op0 = copy_rtx (op0);
3971 if (GET_MODE (op0) == BLKmode
3972 /* If op0 is not BLKmode, but mode is, adjust_mode
3973 below would ICE. While it is likely a FE bug,
3974 try to be robust here. See PR43166. */
3975 || mode == BLKmode
3976 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
3978 gcc_assert (MEM_P (op0));
3979 op0 = adjust_address_nv (op0, mode, 0);
3980 return op0;
3983 /* Fall through. */
3985 adjust_mode:
3986 case PAREN_EXPR:
3987 CASE_CONVERT:
3989 inner_mode = GET_MODE (op0);
3991 if (mode == inner_mode)
3992 return op0;
3994 if (inner_mode == VOIDmode)
3996 if (TREE_CODE (exp) == SSA_NAME)
3997 inner_mode = TYPE_MODE (TREE_TYPE (exp));
3998 else
3999 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4000 if (mode == inner_mode)
4001 return op0;
4004 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4006 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4007 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4008 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4009 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4010 else
4011 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4013 else if (FLOAT_MODE_P (mode))
4015 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4016 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4017 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4018 else
4019 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4021 else if (FLOAT_MODE_P (inner_mode))
4023 if (unsignedp)
4024 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4025 else
4026 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4028 else if (CONSTANT_P (op0)
4029 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
4030 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4031 subreg_lowpart_offset (mode,
4032 inner_mode));
4033 else if (UNARY_CLASS_P (exp)
4034 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4035 : unsignedp)
4036 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4037 else
4038 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4040 return op0;
4043 case MEM_REF:
4044 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4046 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4047 TREE_OPERAND (exp, 0),
4048 TREE_OPERAND (exp, 1));
4049 if (newexp)
4050 return expand_debug_expr (newexp);
4052 /* FALLTHROUGH */
4053 case INDIRECT_REF:
4054 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4055 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4056 if (!op0)
4057 return NULL;
4059 if (TREE_CODE (exp) == MEM_REF)
4061 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4062 || (GET_CODE (op0) == PLUS
4063 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4064 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4065 Instead just use get_inner_reference. */
4066 goto component_ref;
4068 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4069 if (!op1 || !CONST_INT_P (op1))
4070 return NULL;
4072 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4075 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4077 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4078 op0, as);
4079 if (op0 == NULL_RTX)
4080 return NULL;
4082 op0 = gen_rtx_MEM (mode, op0);
4083 set_mem_attributes (op0, exp, 0);
4084 if (TREE_CODE (exp) == MEM_REF
4085 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4086 set_mem_expr (op0, NULL_TREE);
4087 set_mem_addr_space (op0, as);
4089 return op0;
4091 case TARGET_MEM_REF:
4092 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4093 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4094 return NULL;
4096 op0 = expand_debug_expr
4097 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4098 if (!op0)
4099 return NULL;
4101 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4102 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4103 op0, as);
4104 if (op0 == NULL_RTX)
4105 return NULL;
4107 op0 = gen_rtx_MEM (mode, op0);
4109 set_mem_attributes (op0, exp, 0);
4110 set_mem_addr_space (op0, as);
4112 return op0;
4114 component_ref:
4115 case ARRAY_REF:
4116 case ARRAY_RANGE_REF:
4117 case COMPONENT_REF:
4118 case BIT_FIELD_REF:
4119 case REALPART_EXPR:
4120 case IMAGPART_EXPR:
4121 case VIEW_CONVERT_EXPR:
4123 machine_mode mode1;
4124 HOST_WIDE_INT bitsize, bitpos;
4125 tree offset;
4126 int volatilep = 0;
4127 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4128 &mode1, &unsignedp, &volatilep, false);
4129 rtx orig_op0;
4131 if (bitsize == 0)
4132 return NULL;
4134 orig_op0 = op0 = expand_debug_expr (tem);
4136 if (!op0)
4137 return NULL;
4139 if (offset)
4141 machine_mode addrmode, offmode;
4143 if (!MEM_P (op0))
4144 return NULL;
4146 op0 = XEXP (op0, 0);
4147 addrmode = GET_MODE (op0);
4148 if (addrmode == VOIDmode)
4149 addrmode = Pmode;
4151 op1 = expand_debug_expr (offset);
4152 if (!op1)
4153 return NULL;
4155 offmode = GET_MODE (op1);
4156 if (offmode == VOIDmode)
4157 offmode = TYPE_MODE (TREE_TYPE (offset));
4159 if (addrmode != offmode)
4160 op1 = simplify_gen_subreg (addrmode, op1, offmode,
4161 subreg_lowpart_offset (addrmode,
4162 offmode));
4164 /* Don't use offset_address here, we don't need a
4165 recognizable address, and we don't want to generate
4166 code. */
4167 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4168 op0, op1));
4171 if (MEM_P (op0))
4173 if (mode1 == VOIDmode)
4174 /* Bitfield. */
4175 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4176 if (bitpos >= BITS_PER_UNIT)
4178 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4179 bitpos %= BITS_PER_UNIT;
4181 else if (bitpos < 0)
4183 HOST_WIDE_INT units
4184 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4185 op0 = adjust_address_nv (op0, mode1, units);
4186 bitpos += units * BITS_PER_UNIT;
4188 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4189 op0 = adjust_address_nv (op0, mode, 0);
4190 else if (GET_MODE (op0) != mode1)
4191 op0 = adjust_address_nv (op0, mode1, 0);
4192 else
4193 op0 = copy_rtx (op0);
4194 if (op0 == orig_op0)
4195 op0 = shallow_copy_rtx (op0);
4196 set_mem_attributes (op0, exp, 0);
4199 if (bitpos == 0 && mode == GET_MODE (op0))
4200 return op0;
4202 if (bitpos < 0)
4203 return NULL;
4205 if (GET_MODE (op0) == BLKmode)
4206 return NULL;
4208 if ((bitpos % BITS_PER_UNIT) == 0
4209 && bitsize == GET_MODE_BITSIZE (mode1))
4211 machine_mode opmode = GET_MODE (op0);
4213 if (opmode == VOIDmode)
4214 opmode = TYPE_MODE (TREE_TYPE (tem));
4216 /* This condition may hold if we're expanding the address
4217 right past the end of an array that turned out not to
4218 be addressable (i.e., the address was only computed in
4219 debug stmts). The gen_subreg below would rightfully
4220 crash, and the address doesn't really exist, so just
4221 drop it. */
4222 if (bitpos >= GET_MODE_BITSIZE (opmode))
4223 return NULL;
4225 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4226 return simplify_gen_subreg (mode, op0, opmode,
4227 bitpos / BITS_PER_UNIT);
4230 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4231 && TYPE_UNSIGNED (TREE_TYPE (exp))
4232 ? SIGN_EXTRACT
4233 : ZERO_EXTRACT, mode,
4234 GET_MODE (op0) != VOIDmode
4235 ? GET_MODE (op0)
4236 : TYPE_MODE (TREE_TYPE (tem)),
4237 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4240 case ABS_EXPR:
4241 return simplify_gen_unary (ABS, mode, op0, mode);
4243 case NEGATE_EXPR:
4244 return simplify_gen_unary (NEG, mode, op0, mode);
4246 case BIT_NOT_EXPR:
4247 return simplify_gen_unary (NOT, mode, op0, mode);
4249 case FLOAT_EXPR:
4250 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4251 0)))
4252 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4253 inner_mode);
4255 case FIX_TRUNC_EXPR:
4256 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4257 inner_mode);
4259 case POINTER_PLUS_EXPR:
4260 /* For the rare target where pointers are not the same size as
4261 size_t, we need to check for mis-matched modes and correct
4262 the addend. */
4263 if (op0 && op1
4264 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4265 && GET_MODE (op0) != GET_MODE (op1))
4267 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4268 /* If OP0 is a partial mode, then we must truncate, even if it has
4269 the same bitsize as OP1 as GCC's representation of partial modes
4270 is opaque. */
4271 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4272 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4273 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4274 GET_MODE (op1));
4275 else
4276 /* We always sign-extend, regardless of the signedness of
4277 the operand, because the operand is always unsigned
4278 here even if the original C expression is signed. */
4279 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4280 GET_MODE (op1));
4282 /* Fall through. */
4283 case PLUS_EXPR:
4284 return simplify_gen_binary (PLUS, mode, op0, op1);
4286 case MINUS_EXPR:
4287 return simplify_gen_binary (MINUS, mode, op0, op1);
4289 case MULT_EXPR:
4290 return simplify_gen_binary (MULT, mode, op0, op1);
4292 case RDIV_EXPR:
4293 case TRUNC_DIV_EXPR:
4294 case EXACT_DIV_EXPR:
4295 if (unsignedp)
4296 return simplify_gen_binary (UDIV, mode, op0, op1);
4297 else
4298 return simplify_gen_binary (DIV, mode, op0, op1);
4300 case TRUNC_MOD_EXPR:
4301 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4303 case FLOOR_DIV_EXPR:
4304 if (unsignedp)
4305 return simplify_gen_binary (UDIV, mode, op0, op1);
4306 else
4308 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4309 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4310 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4311 return simplify_gen_binary (PLUS, mode, div, adj);
4314 case FLOOR_MOD_EXPR:
4315 if (unsignedp)
4316 return simplify_gen_binary (UMOD, mode, op0, op1);
4317 else
4319 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4320 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4321 adj = simplify_gen_unary (NEG, mode,
4322 simplify_gen_binary (MULT, mode, adj, op1),
4323 mode);
4324 return simplify_gen_binary (PLUS, mode, mod, adj);
4327 case CEIL_DIV_EXPR:
4328 if (unsignedp)
4330 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4331 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4332 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4333 return simplify_gen_binary (PLUS, mode, div, adj);
4335 else
4337 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4338 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4339 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4340 return simplify_gen_binary (PLUS, mode, div, adj);
4343 case CEIL_MOD_EXPR:
4344 if (unsignedp)
4346 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4347 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4348 adj = simplify_gen_unary (NEG, mode,
4349 simplify_gen_binary (MULT, mode, adj, op1),
4350 mode);
4351 return simplify_gen_binary (PLUS, mode, mod, adj);
4353 else
4355 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4356 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4357 adj = simplify_gen_unary (NEG, mode,
4358 simplify_gen_binary (MULT, mode, adj, op1),
4359 mode);
4360 return simplify_gen_binary (PLUS, mode, mod, adj);
4363 case ROUND_DIV_EXPR:
4364 if (unsignedp)
4366 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4367 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4368 rtx adj = round_udiv_adjust (mode, mod, op1);
4369 return simplify_gen_binary (PLUS, mode, div, adj);
4371 else
4373 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4374 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4375 rtx adj = round_sdiv_adjust (mode, mod, op1);
4376 return simplify_gen_binary (PLUS, mode, div, adj);
4379 case ROUND_MOD_EXPR:
4380 if (unsignedp)
4382 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4383 rtx adj = round_udiv_adjust (mode, mod, op1);
4384 adj = simplify_gen_unary (NEG, mode,
4385 simplify_gen_binary (MULT, mode, adj, op1),
4386 mode);
4387 return simplify_gen_binary (PLUS, mode, mod, adj);
4389 else
4391 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4392 rtx adj = round_sdiv_adjust (mode, mod, op1);
4393 adj = simplify_gen_unary (NEG, mode,
4394 simplify_gen_binary (MULT, mode, adj, op1),
4395 mode);
4396 return simplify_gen_binary (PLUS, mode, mod, adj);
4399 case LSHIFT_EXPR:
4400 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4402 case RSHIFT_EXPR:
4403 if (unsignedp)
4404 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4405 else
4406 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4408 case LROTATE_EXPR:
4409 return simplify_gen_binary (ROTATE, mode, op0, op1);
4411 case RROTATE_EXPR:
4412 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4414 case MIN_EXPR:
4415 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4417 case MAX_EXPR:
4418 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4420 case BIT_AND_EXPR:
4421 case TRUTH_AND_EXPR:
4422 return simplify_gen_binary (AND, mode, op0, op1);
4424 case BIT_IOR_EXPR:
4425 case TRUTH_OR_EXPR:
4426 return simplify_gen_binary (IOR, mode, op0, op1);
4428 case BIT_XOR_EXPR:
4429 case TRUTH_XOR_EXPR:
4430 return simplify_gen_binary (XOR, mode, op0, op1);
4432 case TRUTH_ANDIF_EXPR:
4433 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4435 case TRUTH_ORIF_EXPR:
4436 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4438 case TRUTH_NOT_EXPR:
4439 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4441 case LT_EXPR:
4442 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4443 op0, op1);
4445 case LE_EXPR:
4446 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4447 op0, op1);
4449 case GT_EXPR:
4450 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4451 op0, op1);
4453 case GE_EXPR:
4454 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4455 op0, op1);
4457 case EQ_EXPR:
4458 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4460 case NE_EXPR:
4461 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4463 case UNORDERED_EXPR:
4464 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4466 case ORDERED_EXPR:
4467 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4469 case UNLT_EXPR:
4470 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4472 case UNLE_EXPR:
4473 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4475 case UNGT_EXPR:
4476 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4478 case UNGE_EXPR:
4479 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4481 case UNEQ_EXPR:
4482 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4484 case LTGT_EXPR:
4485 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4487 case COND_EXPR:
4488 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4490 case COMPLEX_EXPR:
4491 gcc_assert (COMPLEX_MODE_P (mode));
4492 if (GET_MODE (op0) == VOIDmode)
4493 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4494 if (GET_MODE (op1) == VOIDmode)
4495 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4496 return gen_rtx_CONCAT (mode, op0, op1);
4498 case CONJ_EXPR:
4499 if (GET_CODE (op0) == CONCAT)
4500 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4501 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4502 XEXP (op0, 1),
4503 GET_MODE_INNER (mode)));
4504 else
4506 machine_mode imode = GET_MODE_INNER (mode);
4507 rtx re, im;
4509 if (MEM_P (op0))
4511 re = adjust_address_nv (op0, imode, 0);
4512 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4514 else
4516 machine_mode ifmode = int_mode_for_mode (mode);
4517 machine_mode ihmode = int_mode_for_mode (imode);
4518 rtx halfsize;
4519 if (ifmode == BLKmode || ihmode == BLKmode)
4520 return NULL;
4521 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4522 re = op0;
4523 if (mode != ifmode)
4524 re = gen_rtx_SUBREG (ifmode, re, 0);
4525 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4526 if (imode != ihmode)
4527 re = gen_rtx_SUBREG (imode, re, 0);
4528 im = copy_rtx (op0);
4529 if (mode != ifmode)
4530 im = gen_rtx_SUBREG (ifmode, im, 0);
4531 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4532 if (imode != ihmode)
4533 im = gen_rtx_SUBREG (imode, im, 0);
4535 im = gen_rtx_NEG (imode, im);
4536 return gen_rtx_CONCAT (mode, re, im);
4539 case ADDR_EXPR:
4540 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4541 if (!op0 || !MEM_P (op0))
4543 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4544 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4545 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4546 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4547 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4548 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4550 if (handled_component_p (TREE_OPERAND (exp, 0)))
4552 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4553 tree decl
4554 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4555 &bitoffset, &bitsize, &maxsize);
4556 if ((TREE_CODE (decl) == VAR_DECL
4557 || TREE_CODE (decl) == PARM_DECL
4558 || TREE_CODE (decl) == RESULT_DECL)
4559 && (!TREE_ADDRESSABLE (decl)
4560 || target_for_debug_bind (decl))
4561 && (bitoffset % BITS_PER_UNIT) == 0
4562 && bitsize > 0
4563 && bitsize == maxsize)
4565 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4566 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4570 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4571 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4572 == ADDR_EXPR)
4574 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4575 0));
4576 if (op0 != NULL
4577 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4578 || (GET_CODE (op0) == PLUS
4579 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4580 && CONST_INT_P (XEXP (op0, 1)))))
4582 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4583 1));
4584 if (!op1 || !CONST_INT_P (op1))
4585 return NULL;
4587 return plus_constant (mode, op0, INTVAL (op1));
4591 return NULL;
4594 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4595 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4597 return op0;
4599 case VECTOR_CST:
4601 unsigned i;
4603 op0 = gen_rtx_CONCATN
4604 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4606 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4608 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4609 if (!op1)
4610 return NULL;
4611 XVECEXP (op0, 0, i) = op1;
4614 return op0;
4617 case CONSTRUCTOR:
4618 if (TREE_CLOBBER_P (exp))
4619 return NULL;
4620 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4622 unsigned i;
4623 tree val;
4625 op0 = gen_rtx_CONCATN
4626 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4628 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4630 op1 = expand_debug_expr (val);
4631 if (!op1)
4632 return NULL;
4633 XVECEXP (op0, 0, i) = op1;
4636 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4638 op1 = expand_debug_expr
4639 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4641 if (!op1)
4642 return NULL;
4644 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4645 XVECEXP (op0, 0, i) = op1;
4648 return op0;
4650 else
4651 goto flag_unsupported;
4653 case CALL_EXPR:
4654 /* ??? Maybe handle some builtins? */
4655 return NULL;
4657 case SSA_NAME:
4659 gimple g = get_gimple_for_ssa_name (exp);
4660 if (g)
4662 tree t = NULL_TREE;
4663 if (deep_ter_debug_map)
4665 tree *slot = deep_ter_debug_map->get (exp);
4666 if (slot)
4667 t = *slot;
4669 if (t == NULL_TREE)
4670 t = gimple_assign_rhs_to_tree (g);
4671 op0 = expand_debug_expr (t);
4672 if (!op0)
4673 return NULL;
4675 else
4677 int part = var_to_partition (SA.map, exp);
4679 if (part == NO_PARTITION)
4681 /* If this is a reference to an incoming value of parameter
4682 that is never used in the code or where the incoming
4683 value is never used in the code, use PARM_DECL's
4684 DECL_RTL if set. */
4685 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4686 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4688 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4689 if (op0)
4690 goto adjust_mode;
4691 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4692 if (op0)
4693 goto adjust_mode;
4695 return NULL;
4698 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4700 op0 = copy_rtx (SA.partition_to_pseudo[part]);
4702 goto adjust_mode;
4705 case ERROR_MARK:
4706 return NULL;
4708 /* Vector stuff. For most of the codes we don't have rtl codes. */
4709 case REALIGN_LOAD_EXPR:
4710 case REDUC_MAX_EXPR:
4711 case REDUC_MIN_EXPR:
4712 case REDUC_PLUS_EXPR:
4713 case VEC_COND_EXPR:
4714 case VEC_PACK_FIX_TRUNC_EXPR:
4715 case VEC_PACK_SAT_EXPR:
4716 case VEC_PACK_TRUNC_EXPR:
4717 case VEC_UNPACK_FLOAT_HI_EXPR:
4718 case VEC_UNPACK_FLOAT_LO_EXPR:
4719 case VEC_UNPACK_HI_EXPR:
4720 case VEC_UNPACK_LO_EXPR:
4721 case VEC_WIDEN_MULT_HI_EXPR:
4722 case VEC_WIDEN_MULT_LO_EXPR:
4723 case VEC_WIDEN_MULT_EVEN_EXPR:
4724 case VEC_WIDEN_MULT_ODD_EXPR:
4725 case VEC_WIDEN_LSHIFT_HI_EXPR:
4726 case VEC_WIDEN_LSHIFT_LO_EXPR:
4727 case VEC_PERM_EXPR:
4728 return NULL;
4730 /* Misc codes. */
4731 case ADDR_SPACE_CONVERT_EXPR:
4732 case FIXED_CONVERT_EXPR:
4733 case OBJ_TYPE_REF:
4734 case WITH_SIZE_EXPR:
4735 return NULL;
4737 case DOT_PROD_EXPR:
4738 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4739 && SCALAR_INT_MODE_P (mode))
4742 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4743 0)))
4744 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4745 inner_mode);
4747 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4748 1)))
4749 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4750 inner_mode);
4751 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4752 return simplify_gen_binary (PLUS, mode, op0, op2);
4754 return NULL;
4756 case WIDEN_MULT_EXPR:
4757 case WIDEN_MULT_PLUS_EXPR:
4758 case WIDEN_MULT_MINUS_EXPR:
4759 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4760 && SCALAR_INT_MODE_P (mode))
4762 inner_mode = GET_MODE (op0);
4763 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4764 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4765 else
4766 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4767 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4768 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4769 else
4770 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4771 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4772 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4773 return op0;
4774 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4775 return simplify_gen_binary (PLUS, mode, op0, op2);
4776 else
4777 return simplify_gen_binary (MINUS, mode, op2, op0);
4779 return NULL;
4781 case MULT_HIGHPART_EXPR:
4782 /* ??? Similar to the above. */
4783 return NULL;
4785 case WIDEN_SUM_EXPR:
4786 case WIDEN_LSHIFT_EXPR:
4787 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4788 && SCALAR_INT_MODE_P (mode))
4791 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4792 0)))
4793 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4794 inner_mode);
4795 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4796 ? ASHIFT : PLUS, mode, op0, op1);
4798 return NULL;
4800 case FMA_EXPR:
4801 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4803 default:
4804 flag_unsupported:
4805 #ifdef ENABLE_CHECKING
4806 debug_tree (exp);
4807 gcc_unreachable ();
4808 #else
4809 return NULL;
4810 #endif
4814 /* Return an RTX equivalent to the source bind value of the tree expression
4815 EXP. */
4817 static rtx
4818 expand_debug_source_expr (tree exp)
4820 rtx op0 = NULL_RTX;
4821 machine_mode mode = VOIDmode, inner_mode;
4823 switch (TREE_CODE (exp))
4825 case PARM_DECL:
4827 mode = DECL_MODE (exp);
4828 op0 = expand_debug_parm_decl (exp);
4829 if (op0)
4830 break;
4831 /* See if this isn't an argument that has been completely
4832 optimized out. */
4833 if (!DECL_RTL_SET_P (exp)
4834 && !DECL_INCOMING_RTL (exp)
4835 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4837 tree aexp = DECL_ORIGIN (exp);
4838 if (DECL_CONTEXT (aexp)
4839 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4841 vec<tree, va_gc> **debug_args;
4842 unsigned int ix;
4843 tree ddecl;
4844 debug_args = decl_debug_args_lookup (current_function_decl);
4845 if (debug_args != NULL)
4847 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
4848 ix += 2)
4849 if (ddecl == aexp)
4850 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4854 break;
4856 default:
4857 break;
4860 if (op0 == NULL_RTX)
4861 return NULL_RTX;
4863 inner_mode = GET_MODE (op0);
4864 if (mode == inner_mode)
4865 return op0;
4867 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4869 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4870 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4871 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4872 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4873 else
4874 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4876 else if (FLOAT_MODE_P (mode))
4877 gcc_unreachable ();
4878 else if (FLOAT_MODE_P (inner_mode))
4880 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4881 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4882 else
4883 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4885 else if (CONSTANT_P (op0)
4886 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4887 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4888 subreg_lowpart_offset (mode, inner_mode));
4889 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4890 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4891 else
4892 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4894 return op0;
4897 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
4898 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4899 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
4901 static void
4902 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
4904 rtx exp = *exp_p;
4906 if (exp == NULL_RTX)
4907 return;
4909 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4910 return;
4912 if (depth == 4)
4914 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4915 rtx dval = make_debug_expr_from_rtl (exp);
4917 /* Emit a debug bind insn before INSN. */
4918 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4919 DEBUG_EXPR_TREE_DECL (dval), exp,
4920 VAR_INIT_STATUS_INITIALIZED);
4922 emit_debug_insn_before (bind, insn);
4923 *exp_p = dval;
4924 return;
4927 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
4928 int i, j;
4929 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
4930 switch (*format_ptr++)
4932 case 'e':
4933 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
4934 break;
4936 case 'E':
4937 case 'V':
4938 for (j = 0; j < XVECLEN (exp, i); j++)
4939 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
4940 break;
4942 default:
4943 break;
4947 /* Expand the _LOCs in debug insns. We run this after expanding all
4948 regular insns, so that any variables referenced in the function
4949 will have their DECL_RTLs set. */
4951 static void
4952 expand_debug_locations (void)
4954 rtx_insn *insn;
4955 rtx_insn *last = get_last_insn ();
4956 int save_strict_alias = flag_strict_aliasing;
4958 /* New alias sets while setting up memory attributes cause
4959 -fcompare-debug failures, even though it doesn't bring about any
4960 codegen changes. */
4961 flag_strict_aliasing = 0;
4963 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4964 if (DEBUG_INSN_P (insn))
4966 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
4967 rtx val;
4968 rtx_insn *prev_insn, *insn2;
4969 machine_mode mode;
4971 if (value == NULL_TREE)
4972 val = NULL_RTX;
4973 else
4975 if (INSN_VAR_LOCATION_STATUS (insn)
4976 == VAR_INIT_STATUS_UNINITIALIZED)
4977 val = expand_debug_source_expr (value);
4978 /* The avoid_deep_ter_for_debug function inserts
4979 debug bind stmts after SSA_NAME definition, with the
4980 SSA_NAME as the whole bind location. Disable temporarily
4981 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
4982 being defined in this DEBUG_INSN. */
4983 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
4985 tree *slot = deep_ter_debug_map->get (value);
4986 if (slot)
4988 if (*slot == INSN_VAR_LOCATION_DECL (insn))
4989 *slot = NULL_TREE;
4990 else
4991 slot = NULL;
4993 val = expand_debug_expr (value);
4994 if (slot)
4995 *slot = INSN_VAR_LOCATION_DECL (insn);
4997 else
4998 val = expand_debug_expr (value);
4999 gcc_assert (last == get_last_insn ());
5002 if (!val)
5003 val = gen_rtx_UNKNOWN_VAR_LOC ();
5004 else
5006 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5008 gcc_assert (mode == GET_MODE (val)
5009 || (GET_MODE (val) == VOIDmode
5010 && (CONST_SCALAR_INT_P (val)
5011 || GET_CODE (val) == CONST_FIXED
5012 || GET_CODE (val) == LABEL_REF)));
5015 INSN_VAR_LOCATION_LOC (insn) = val;
5016 prev_insn = PREV_INSN (insn);
5017 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5018 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5021 flag_strict_aliasing = save_strict_alias;
5024 /* Performs swapping operands of commutative operations to expand
5025 the expensive one first. */
5027 static void
5028 reorder_operands (basic_block bb)
5030 unsigned int *lattice; /* Hold cost of each statement. */
5031 unsigned int i = 0, n = 0;
5032 gimple_stmt_iterator gsi;
5033 gimple_seq stmts;
5034 gimple stmt;
5035 bool swap;
5036 tree op0, op1;
5037 ssa_op_iter iter;
5038 use_operand_p use_p;
5039 gimple def0, def1;
5041 /* Compute cost of each statement using estimate_num_insns. */
5042 stmts = bb_seq (bb);
5043 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5045 stmt = gsi_stmt (gsi);
5046 if (!is_gimple_debug (stmt))
5047 gimple_set_uid (stmt, n++);
5049 lattice = XNEWVEC (unsigned int, n);
5050 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5052 unsigned cost;
5053 stmt = gsi_stmt (gsi);
5054 if (is_gimple_debug (stmt))
5055 continue;
5056 cost = estimate_num_insns (stmt, &eni_size_weights);
5057 lattice[i] = cost;
5058 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5060 tree use = USE_FROM_PTR (use_p);
5061 gimple def_stmt;
5062 if (TREE_CODE (use) != SSA_NAME)
5063 continue;
5064 def_stmt = get_gimple_for_ssa_name (use);
5065 if (!def_stmt)
5066 continue;
5067 lattice[i] += lattice[gimple_uid (def_stmt)];
5069 i++;
5070 if (!is_gimple_assign (stmt)
5071 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5072 continue;
5073 op0 = gimple_op (stmt, 1);
5074 op1 = gimple_op (stmt, 2);
5075 if (TREE_CODE (op0) != SSA_NAME
5076 || TREE_CODE (op1) != SSA_NAME)
5077 continue;
5078 /* Swap operands if the second one is more expensive. */
5079 def0 = get_gimple_for_ssa_name (op0);
5080 def1 = get_gimple_for_ssa_name (op1);
5081 if (!def1)
5082 continue;
5083 swap = false;
5084 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5085 swap = true;
5086 if (swap)
5088 if (dump_file && (dump_flags & TDF_DETAILS))
5090 fprintf (dump_file, "Swap operands in stmt:\n");
5091 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5092 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5093 def0 ? lattice[gimple_uid (def0)] : 0,
5094 lattice[gimple_uid (def1)]);
5096 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5097 gimple_assign_rhs2_ptr (stmt));
5100 XDELETE (lattice);
5103 /* Expand basic block BB from GIMPLE trees to RTL. */
5105 static basic_block
5106 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5108 gimple_stmt_iterator gsi;
5109 gimple_seq stmts;
5110 gimple stmt = NULL;
5111 rtx_note *note;
5112 rtx_insn *last;
5113 edge e;
5114 edge_iterator ei;
5116 if (dump_file)
5117 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5118 bb->index);
5120 /* Note that since we are now transitioning from GIMPLE to RTL, we
5121 cannot use the gsi_*_bb() routines because they expect the basic
5122 block to be in GIMPLE, instead of RTL. Therefore, we need to
5123 access the BB sequence directly. */
5124 if (optimize)
5125 reorder_operands (bb);
5126 stmts = bb_seq (bb);
5127 bb->il.gimple.seq = NULL;
5128 bb->il.gimple.phi_nodes = NULL;
5129 rtl_profile_for_bb (bb);
5130 init_rtl_bb_info (bb);
5131 bb->flags |= BB_RTL;
5133 /* Remove the RETURN_EXPR if we may fall though to the exit
5134 instead. */
5135 gsi = gsi_last (stmts);
5136 if (!gsi_end_p (gsi)
5137 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5139 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5141 gcc_assert (single_succ_p (bb));
5142 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5144 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5145 && !gimple_return_retval (ret_stmt))
5147 gsi_remove (&gsi, false);
5148 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5152 gsi = gsi_start (stmts);
5153 if (!gsi_end_p (gsi))
5155 stmt = gsi_stmt (gsi);
5156 if (gimple_code (stmt) != GIMPLE_LABEL)
5157 stmt = NULL;
5160 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5162 if (stmt || elt)
5164 last = get_last_insn ();
5166 if (stmt)
5168 expand_gimple_stmt (stmt);
5169 gsi_next (&gsi);
5172 if (elt)
5173 emit_label (*elt);
5175 /* Java emits line number notes in the top of labels.
5176 ??? Make this go away once line number notes are obsoleted. */
5177 BB_HEAD (bb) = NEXT_INSN (last);
5178 if (NOTE_P (BB_HEAD (bb)))
5179 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5180 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5182 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5184 else
5185 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5187 NOTE_BASIC_BLOCK (note) = bb;
5189 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5191 basic_block new_bb;
5193 stmt = gsi_stmt (gsi);
5195 /* If this statement is a non-debug one, and we generate debug
5196 insns, then this one might be the last real use of a TERed
5197 SSA_NAME, but where there are still some debug uses further
5198 down. Expanding the current SSA name in such further debug
5199 uses by their RHS might lead to wrong debug info, as coalescing
5200 might make the operands of such RHS be placed into the same
5201 pseudo as something else. Like so:
5202 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5203 use(a_1);
5204 a_2 = ...
5205 #DEBUG ... => a_1
5206 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5207 If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
5208 the write to a_2 would actually have clobbered the place which
5209 formerly held a_0.
5211 So, instead of that, we recognize the situation, and generate
5212 debug temporaries at the last real use of TERed SSA names:
5213 a_1 = a_0 + 1;
5214 #DEBUG #D1 => a_1
5215 use(a_1);
5216 a_2 = ...
5217 #DEBUG ... => #D1
5219 if (MAY_HAVE_DEBUG_INSNS
5220 && SA.values
5221 && !is_gimple_debug (stmt))
5223 ssa_op_iter iter;
5224 tree op;
5225 gimple def;
5227 location_t sloc = curr_insn_location ();
5229 /* Look for SSA names that have their last use here (TERed
5230 names always have only one real use). */
5231 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5232 if ((def = get_gimple_for_ssa_name (op)))
5234 imm_use_iterator imm_iter;
5235 use_operand_p use_p;
5236 bool have_debug_uses = false;
5238 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5240 if (gimple_debug_bind_p (USE_STMT (use_p)))
5242 have_debug_uses = true;
5243 break;
5247 if (have_debug_uses)
5249 /* OP is a TERed SSA name, with DEF its defining
5250 statement, and where OP is used in further debug
5251 instructions. Generate a debug temporary, and
5252 replace all uses of OP in debug insns with that
5253 temporary. */
5254 gimple debugstmt;
5255 tree value = gimple_assign_rhs_to_tree (def);
5256 tree vexpr = make_node (DEBUG_EXPR_DECL);
5257 rtx val;
5258 machine_mode mode;
5260 set_curr_insn_location (gimple_location (def));
5262 DECL_ARTIFICIAL (vexpr) = 1;
5263 TREE_TYPE (vexpr) = TREE_TYPE (value);
5264 if (DECL_P (value))
5265 mode = DECL_MODE (value);
5266 else
5267 mode = TYPE_MODE (TREE_TYPE (value));
5268 DECL_MODE (vexpr) = mode;
5270 val = gen_rtx_VAR_LOCATION
5271 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5273 emit_debug_insn (val);
5275 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5277 if (!gimple_debug_bind_p (debugstmt))
5278 continue;
5280 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5281 SET_USE (use_p, vexpr);
5283 update_stmt (debugstmt);
5287 set_curr_insn_location (sloc);
5290 currently_expanding_gimple_stmt = stmt;
5292 /* Expand this statement, then evaluate the resulting RTL and
5293 fixup the CFG accordingly. */
5294 if (gimple_code (stmt) == GIMPLE_COND)
5296 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5297 if (new_bb)
5298 return new_bb;
5300 else if (gimple_debug_bind_p (stmt))
5302 location_t sloc = curr_insn_location ();
5303 gimple_stmt_iterator nsi = gsi;
5305 for (;;)
5307 tree var = gimple_debug_bind_get_var (stmt);
5308 tree value;
5309 rtx val;
5310 machine_mode mode;
5312 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5313 && TREE_CODE (var) != LABEL_DECL
5314 && !target_for_debug_bind (var))
5315 goto delink_debug_stmt;
5317 if (gimple_debug_bind_has_value_p (stmt))
5318 value = gimple_debug_bind_get_value (stmt);
5319 else
5320 value = NULL_TREE;
5322 last = get_last_insn ();
5324 set_curr_insn_location (gimple_location (stmt));
5326 if (DECL_P (var))
5327 mode = DECL_MODE (var);
5328 else
5329 mode = TYPE_MODE (TREE_TYPE (var));
5331 val = gen_rtx_VAR_LOCATION
5332 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5334 emit_debug_insn (val);
5336 if (dump_file && (dump_flags & TDF_DETAILS))
5338 /* We can't dump the insn with a TREE where an RTX
5339 is expected. */
5340 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5341 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5342 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5345 delink_debug_stmt:
5346 /* In order not to generate too many debug temporaries,
5347 we delink all uses of debug statements we already expanded.
5348 Therefore debug statements between definition and real
5349 use of TERed SSA names will continue to use the SSA name,
5350 and not be replaced with debug temps. */
5351 delink_stmt_imm_use (stmt);
5353 gsi = nsi;
5354 gsi_next (&nsi);
5355 if (gsi_end_p (nsi))
5356 break;
5357 stmt = gsi_stmt (nsi);
5358 if (!gimple_debug_bind_p (stmt))
5359 break;
5362 set_curr_insn_location (sloc);
5364 else if (gimple_debug_source_bind_p (stmt))
5366 location_t sloc = curr_insn_location ();
5367 tree var = gimple_debug_source_bind_get_var (stmt);
5368 tree value = gimple_debug_source_bind_get_value (stmt);
5369 rtx val;
5370 machine_mode mode;
5372 last = get_last_insn ();
5374 set_curr_insn_location (gimple_location (stmt));
5376 mode = DECL_MODE (var);
5378 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5379 VAR_INIT_STATUS_UNINITIALIZED);
5381 emit_debug_insn (val);
5383 if (dump_file && (dump_flags & TDF_DETAILS))
5385 /* We can't dump the insn with a TREE where an RTX
5386 is expected. */
5387 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5388 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5389 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5392 set_curr_insn_location (sloc);
5394 else
5396 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5397 if (call_stmt
5398 && gimple_call_tail_p (call_stmt)
5399 && disable_tail_calls)
5400 gimple_call_set_tail (call_stmt, false);
5402 if (call_stmt && gimple_call_tail_p (call_stmt))
5404 bool can_fallthru;
5405 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5406 if (new_bb)
5408 if (can_fallthru)
5409 bb = new_bb;
5410 else
5411 return new_bb;
5414 else
5416 def_operand_p def_p;
5417 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5419 if (def_p != NULL)
5421 /* Ignore this stmt if it is in the list of
5422 replaceable expressions. */
5423 if (SA.values
5424 && bitmap_bit_p (SA.values,
5425 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5426 continue;
5428 last = expand_gimple_stmt (stmt);
5429 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5434 currently_expanding_gimple_stmt = NULL;
5436 /* Expand implicit goto and convert goto_locus. */
5437 FOR_EACH_EDGE (e, ei, bb->succs)
5439 if (e->goto_locus != UNKNOWN_LOCATION)
5440 set_curr_insn_location (e->goto_locus);
5441 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5443 emit_jump (label_rtx_for_bb (e->dest));
5444 e->flags &= ~EDGE_FALLTHRU;
5448 /* Expanded RTL can create a jump in the last instruction of block.
5449 This later might be assumed to be a jump to successor and break edge insertion.
5450 We need to insert dummy move to prevent this. PR41440. */
5451 if (single_succ_p (bb)
5452 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5453 && (last = get_last_insn ())
5454 && JUMP_P (last))
5456 rtx dummy = gen_reg_rtx (SImode);
5457 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5460 do_pending_stack_adjust ();
5462 /* Find the block tail. The last insn in the block is the insn
5463 before a barrier and/or table jump insn. */
5464 last = get_last_insn ();
5465 if (BARRIER_P (last))
5466 last = PREV_INSN (last);
5467 if (JUMP_TABLE_DATA_P (last))
5468 last = PREV_INSN (PREV_INSN (last));
5469 BB_END (bb) = last;
5471 update_bb_for_insn (bb);
5473 return bb;
5477 /* Create a basic block for initialization code. */
5479 static basic_block
5480 construct_init_block (void)
5482 basic_block init_block, first_block;
5483 edge e = NULL;
5484 int flags;
5486 /* Multiple entry points not supported yet. */
5487 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5488 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5489 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5490 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5491 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5493 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5495 /* When entry edge points to first basic block, we don't need jump,
5496 otherwise we have to jump into proper target. */
5497 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5499 tree label = gimple_block_label (e->dest);
5501 emit_jump (jump_target_rtx (label));
5502 flags = 0;
5504 else
5505 flags = EDGE_FALLTHRU;
5507 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5508 get_last_insn (),
5509 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5510 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5511 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5512 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5513 if (e)
5515 first_block = e->dest;
5516 redirect_edge_succ (e, init_block);
5517 e = make_edge (init_block, first_block, flags);
5519 else
5520 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5521 e->probability = REG_BR_PROB_BASE;
5522 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5524 update_bb_for_insn (init_block);
5525 return init_block;
5528 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5529 found in the block tree. */
5531 static void
5532 set_block_levels (tree block, int level)
5534 while (block)
5536 BLOCK_NUMBER (block) = level;
5537 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5538 block = BLOCK_CHAIN (block);
5542 /* Create a block containing landing pads and similar stuff. */
5544 static void
5545 construct_exit_block (void)
5547 rtx_insn *head = get_last_insn ();
5548 rtx_insn *end;
5549 basic_block exit_block;
5550 edge e, e2;
5551 unsigned ix;
5552 edge_iterator ei;
5553 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5554 rtx_insn *orig_end = BB_END (prev_bb);
5556 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5558 /* Make sure the locus is set to the end of the function, so that
5559 epilogue line numbers and warnings are set properly. */
5560 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5561 input_location = cfun->function_end_locus;
5563 /* Generate rtl for function exit. */
5564 expand_function_end ();
5566 end = get_last_insn ();
5567 if (head == end)
5568 return;
5569 /* While emitting the function end we could move end of the last basic
5570 block. */
5571 BB_END (prev_bb) = orig_end;
5572 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5573 head = NEXT_INSN (head);
5574 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5575 bb frequency counting will be confused. Any instructions before that
5576 label are emitted for the case where PREV_BB falls through into the
5577 exit block, so append those instructions to prev_bb in that case. */
5578 if (NEXT_INSN (head) != return_label)
5580 while (NEXT_INSN (head) != return_label)
5582 if (!NOTE_P (NEXT_INSN (head)))
5583 BB_END (prev_bb) = NEXT_INSN (head);
5584 head = NEXT_INSN (head);
5587 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5588 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5589 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5590 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5592 ix = 0;
5593 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5595 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5596 if (!(e->flags & EDGE_ABNORMAL))
5597 redirect_edge_succ (e, exit_block);
5598 else
5599 ix++;
5602 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5603 e->probability = REG_BR_PROB_BASE;
5604 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5605 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5606 if (e2 != e)
5608 e->count -= e2->count;
5609 exit_block->count -= e2->count;
5610 exit_block->frequency -= EDGE_FREQUENCY (e2);
5612 if (e->count < 0)
5613 e->count = 0;
5614 if (exit_block->count < 0)
5615 exit_block->count = 0;
5616 if (exit_block->frequency < 0)
5617 exit_block->frequency = 0;
5618 update_bb_for_insn (exit_block);
5621 /* Helper function for discover_nonconstant_array_refs.
5622 Look for ARRAY_REF nodes with non-constant indexes and mark them
5623 addressable. */
5625 static tree
5626 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5627 void *data ATTRIBUTE_UNUSED)
5629 tree t = *tp;
5631 if (IS_TYPE_OR_DECL_P (t))
5632 *walk_subtrees = 0;
5633 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5635 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5636 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5637 && (!TREE_OPERAND (t, 2)
5638 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5639 || (TREE_CODE (t) == COMPONENT_REF
5640 && (!TREE_OPERAND (t,2)
5641 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5642 || TREE_CODE (t) == BIT_FIELD_REF
5643 || TREE_CODE (t) == REALPART_EXPR
5644 || TREE_CODE (t) == IMAGPART_EXPR
5645 || TREE_CODE (t) == VIEW_CONVERT_EXPR
5646 || CONVERT_EXPR_P (t))
5647 t = TREE_OPERAND (t, 0);
5649 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5651 t = get_base_address (t);
5652 if (t && DECL_P (t)
5653 && DECL_MODE (t) != BLKmode)
5654 TREE_ADDRESSABLE (t) = 1;
5657 *walk_subtrees = 0;
5660 return NULL_TREE;
5663 /* RTL expansion is not able to compile array references with variable
5664 offsets for arrays stored in single register. Discover such
5665 expressions and mark variables as addressable to avoid this
5666 scenario. */
5668 static void
5669 discover_nonconstant_array_refs (void)
5671 basic_block bb;
5672 gimple_stmt_iterator gsi;
5674 FOR_EACH_BB_FN (bb, cfun)
5675 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5677 gimple stmt = gsi_stmt (gsi);
5678 if (!is_gimple_debug (stmt))
5679 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
5683 /* This function sets crtl->args.internal_arg_pointer to a virtual
5684 register if DRAP is needed. Local register allocator will replace
5685 virtual_incoming_args_rtx with the virtual register. */
5687 static void
5688 expand_stack_alignment (void)
5690 rtx drap_rtx;
5691 unsigned int preferred_stack_boundary;
5693 if (! SUPPORTS_STACK_ALIGNMENT)
5694 return;
5696 if (cfun->calls_alloca
5697 || cfun->has_nonlocal_label
5698 || crtl->has_nonlocal_goto)
5699 crtl->need_drap = true;
5701 /* Call update_stack_boundary here again to update incoming stack
5702 boundary. It may set incoming stack alignment to a different
5703 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5704 use the minimum incoming stack alignment to check if it is OK
5705 to perform sibcall optimization since sibcall optimization will
5706 only align the outgoing stack to incoming stack boundary. */
5707 if (targetm.calls.update_stack_boundary)
5708 targetm.calls.update_stack_boundary ();
5710 /* The incoming stack frame has to be aligned at least at
5711 parm_stack_boundary. */
5712 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5714 /* Update crtl->stack_alignment_estimated and use it later to align
5715 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5716 exceptions since callgraph doesn't collect incoming stack alignment
5717 in this case. */
5718 if (cfun->can_throw_non_call_exceptions
5719 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5720 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5721 else
5722 preferred_stack_boundary = crtl->preferred_stack_boundary;
5723 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5724 crtl->stack_alignment_estimated = preferred_stack_boundary;
5725 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5726 crtl->stack_alignment_needed = preferred_stack_boundary;
5728 gcc_assert (crtl->stack_alignment_needed
5729 <= crtl->stack_alignment_estimated);
5731 crtl->stack_realign_needed
5732 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5733 crtl->stack_realign_tried = crtl->stack_realign_needed;
5735 crtl->stack_realign_processed = true;
5737 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5738 alignment. */
5739 gcc_assert (targetm.calls.get_drap_rtx != NULL);
5740 drap_rtx = targetm.calls.get_drap_rtx ();
5742 /* stack_realign_drap and drap_rtx must match. */
5743 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5745 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5746 if (NULL != drap_rtx)
5748 crtl->args.internal_arg_pointer = drap_rtx;
5750 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5751 needed. */
5752 fixup_tail_calls ();
5757 static void
5758 expand_main_function (void)
5760 #if (defined(INVOKE__main) \
5761 || (!defined(HAS_INIT_SECTION) \
5762 && !defined(INIT_SECTION_ASM_OP) \
5763 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5764 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5765 #endif
5769 /* Expand code to initialize the stack_protect_guard. This is invoked at
5770 the beginning of a function to be protected. */
5772 #ifndef HAVE_stack_protect_set
5773 # define HAVE_stack_protect_set 0
5774 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5775 #endif
5777 static void
5778 stack_protect_prologue (void)
5780 tree guard_decl = targetm.stack_protect_guard ();
5781 rtx x, y;
5783 x = expand_normal (crtl->stack_protect_guard);
5784 y = expand_normal (guard_decl);
5786 /* Allow the target to copy from Y to X without leaking Y into a
5787 register. */
5788 if (HAVE_stack_protect_set)
5790 rtx insn = gen_stack_protect_set (x, y);
5791 if (insn)
5793 emit_insn (insn);
5794 return;
5798 /* Otherwise do a straight move. */
5799 emit_move_insn (x, y);
5802 /* Translate the intermediate representation contained in the CFG
5803 from GIMPLE trees to RTL.
5805 We do conversion per basic block and preserve/update the tree CFG.
5806 This implies we have to do some magic as the CFG can simultaneously
5807 consist of basic blocks containing RTL and GIMPLE trees. This can
5808 confuse the CFG hooks, so be careful to not manipulate CFG during
5809 the expansion. */
5811 namespace {
5813 const pass_data pass_data_expand =
5815 RTL_PASS, /* type */
5816 "expand", /* name */
5817 OPTGROUP_NONE, /* optinfo_flags */
5818 TV_EXPAND, /* tv_id */
5819 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5820 | PROP_gimple_lcx
5821 | PROP_gimple_lvec
5822 | PROP_gimple_lva), /* properties_required */
5823 PROP_rtl, /* properties_provided */
5824 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5825 0, /* todo_flags_start */
5826 0, /* todo_flags_finish */
5829 class pass_expand : public rtl_opt_pass
5831 public:
5832 pass_expand (gcc::context *ctxt)
5833 : rtl_opt_pass (pass_data_expand, ctxt)
5836 /* opt_pass methods: */
5837 virtual unsigned int execute (function *);
5839 }; // class pass_expand
5841 unsigned int
5842 pass_expand::execute (function *fun)
5844 basic_block bb, init_block;
5845 sbitmap blocks;
5846 edge_iterator ei;
5847 edge e;
5848 rtx_insn *var_seq, *var_ret_seq;
5849 unsigned i;
5851 timevar_push (TV_OUT_OF_SSA);
5852 rewrite_out_of_ssa (&SA);
5853 timevar_pop (TV_OUT_OF_SSA);
5854 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
5856 if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
5858 gimple_stmt_iterator gsi;
5859 FOR_EACH_BB_FN (bb, cfun)
5860 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5861 if (gimple_debug_bind_p (gsi_stmt (gsi)))
5862 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
5865 /* Make sure all values used by the optimization passes have sane
5866 defaults. */
5867 reg_renumber = 0;
5869 /* Some backends want to know that we are expanding to RTL. */
5870 currently_expanding_to_rtl = 1;
5871 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5872 free_dominance_info (CDI_DOMINATORS);
5874 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
5876 if (chkp_function_instrumented_p (current_function_decl))
5877 chkp_reset_rtl_bounds ();
5879 insn_locations_init ();
5880 if (!DECL_IS_BUILTIN (current_function_decl))
5882 /* Eventually, all FEs should explicitly set function_start_locus. */
5883 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
5884 set_curr_insn_location
5885 (DECL_SOURCE_LOCATION (current_function_decl));
5886 else
5887 set_curr_insn_location (fun->function_start_locus);
5889 else
5890 set_curr_insn_location (UNKNOWN_LOCATION);
5891 prologue_location = curr_insn_location ();
5893 #ifdef INSN_SCHEDULING
5894 init_sched_attrs ();
5895 #endif
5897 /* Make sure first insn is a note even if we don't want linenums.
5898 This makes sure the first insn will never be deleted.
5899 Also, final expects a note to appear there. */
5900 emit_note (NOTE_INSN_DELETED);
5902 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5903 discover_nonconstant_array_refs ();
5905 targetm.expand_to_rtl_hook ();
5906 crtl->stack_alignment_needed = STACK_BOUNDARY;
5907 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
5908 crtl->stack_alignment_estimated = 0;
5909 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5910 fun->cfg->max_jumptable_ents = 0;
5912 /* Resovle the function section. Some targets, like ARM EABI rely on knowledge
5913 of the function section at exapnsion time to predict distance of calls. */
5914 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5916 /* Expand the variables recorded during gimple lowering. */
5917 timevar_push (TV_VAR_EXPAND);
5918 start_sequence ();
5920 var_ret_seq = expand_used_vars ();
5922 var_seq = get_insns ();
5923 end_sequence ();
5924 timevar_pop (TV_VAR_EXPAND);
5926 /* Honor stack protection warnings. */
5927 if (warn_stack_protect)
5929 if (fun->calls_alloca)
5930 warning (OPT_Wstack_protector,
5931 "stack protector not protecting local variables: "
5932 "variable length buffer");
5933 if (has_short_buffer && !crtl->stack_protect_guard)
5934 warning (OPT_Wstack_protector,
5935 "stack protector not protecting function: "
5936 "all local arrays are less than %d bytes long",
5937 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
5940 /* Set up parameters and prepare for return, for the function. */
5941 expand_function_start (current_function_decl);
5943 /* If we emitted any instructions for setting up the variables,
5944 emit them before the FUNCTION_START note. */
5945 if (var_seq)
5947 emit_insn_before (var_seq, parm_birth_insn);
5949 /* In expand_function_end we'll insert the alloca save/restore
5950 before parm_birth_insn. We've just insertted an alloca call.
5951 Adjust the pointer to match. */
5952 parm_birth_insn = var_seq;
5955 /* Now that we also have the parameter RTXs, copy them over to our
5956 partitions. */
5957 for (i = 0; i < SA.map->num_partitions; i++)
5959 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
5961 if (TREE_CODE (var) != VAR_DECL
5962 && !SA.partition_to_pseudo[i])
5963 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
5964 gcc_assert (SA.partition_to_pseudo[i]);
5966 /* If this decl was marked as living in multiple places, reset
5967 this now to NULL. */
5968 if (DECL_RTL_IF_SET (var) == pc_rtx)
5969 SET_DECL_RTL (var, NULL);
5971 /* Some RTL parts really want to look at DECL_RTL(x) when x
5972 was a decl marked in REG_ATTR or MEM_ATTR. We could use
5973 SET_DECL_RTL here making this available, but that would mean
5974 to select one of the potentially many RTLs for one DECL. Instead
5975 of doing that we simply reset the MEM_EXPR of the RTL in question,
5976 then nobody can get at it and hence nobody can call DECL_RTL on it. */
5977 if (!DECL_RTL_SET_P (var))
5979 if (MEM_P (SA.partition_to_pseudo[i]))
5980 set_mem_expr (SA.partition_to_pseudo[i], NULL);
5984 /* If we have a class containing differently aligned pointers
5985 we need to merge those into the corresponding RTL pointer
5986 alignment. */
5987 for (i = 1; i < num_ssa_names; i++)
5989 tree name = ssa_name (i);
5990 int part;
5991 rtx r;
5993 if (!name
5994 /* We might have generated new SSA names in
5995 update_alias_info_with_stack_vars. They will have a NULL
5996 defining statements, and won't be part of the partitioning,
5997 so ignore those. */
5998 || !SSA_NAME_DEF_STMT (name))
5999 continue;
6000 part = var_to_partition (SA.map, name);
6001 if (part == NO_PARTITION)
6002 continue;
6004 /* Adjust all partition members to get the underlying decl of
6005 the representative which we might have created in expand_one_var. */
6006 if (SSA_NAME_VAR (name) == NULL_TREE)
6008 tree leader = partition_to_var (SA.map, part);
6009 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
6010 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
6012 if (!POINTER_TYPE_P (TREE_TYPE (name)))
6013 continue;
6015 r = SA.partition_to_pseudo[part];
6016 if (REG_P (r))
6017 mark_reg_pointer (r, get_pointer_alignment (name));
6020 /* If this function is `main', emit a call to `__main'
6021 to run global initializers, etc. */
6022 if (DECL_NAME (current_function_decl)
6023 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6024 && DECL_FILE_SCOPE_P (current_function_decl))
6025 expand_main_function ();
6027 /* Initialize the stack_protect_guard field. This must happen after the
6028 call to __main (if any) so that the external decl is initialized. */
6029 if (crtl->stack_protect_guard)
6030 stack_protect_prologue ();
6032 expand_phi_nodes (&SA);
6034 /* Register rtl specific functions for cfg. */
6035 rtl_register_cfg_hooks ();
6037 init_block = construct_init_block ();
6039 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
6040 remaining edges later. */
6041 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6042 e->flags &= ~EDGE_EXECUTABLE;
6044 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6045 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6046 next_bb)
6047 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6049 if (MAY_HAVE_DEBUG_INSNS)
6050 expand_debug_locations ();
6052 if (deep_ter_debug_map)
6054 delete deep_ter_debug_map;
6055 deep_ter_debug_map = NULL;
6058 /* Free stuff we no longer need after GIMPLE optimizations. */
6059 free_dominance_info (CDI_DOMINATORS);
6060 free_dominance_info (CDI_POST_DOMINATORS);
6061 delete_tree_cfg_annotations ();
6063 timevar_push (TV_OUT_OF_SSA);
6064 finish_out_of_ssa (&SA);
6065 timevar_pop (TV_OUT_OF_SSA);
6067 timevar_push (TV_POST_EXPAND);
6068 /* We are no longer in SSA form. */
6069 fun->gimple_df->in_ssa_p = false;
6070 loops_state_clear (LOOP_CLOSED_SSA);
6072 /* Expansion is used by optimization passes too, set maybe_hot_insn_p
6073 conservatively to true until they are all profile aware. */
6074 delete lab_rtx_for_bb;
6075 free_histograms ();
6077 construct_exit_block ();
6078 insn_locations_finalize ();
6080 if (var_ret_seq)
6082 rtx_insn *after = return_label;
6083 rtx_insn *next = NEXT_INSN (after);
6084 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6085 after = next;
6086 emit_insn_after (var_ret_seq, after);
6089 /* Zap the tree EH table. */
6090 set_eh_throw_stmt_table (fun, NULL);
6092 /* We need JUMP_LABEL be set in order to redirect jumps, and hence
6093 split edges which edge insertions might do. */
6094 rebuild_jump_labels (get_insns ());
6096 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6097 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6099 edge e;
6100 edge_iterator ei;
6101 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6103 if (e->insns.r)
6105 rebuild_jump_labels_chain (e->insns.r);
6106 /* Put insns after parm birth, but before
6107 NOTE_INSNS_FUNCTION_BEG. */
6108 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6109 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6111 rtx_insn *insns = e->insns.r;
6112 e->insns.r = NULL;
6113 if (NOTE_P (parm_birth_insn)
6114 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6115 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6116 else
6117 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6119 else
6120 commit_one_edge_insertion (e);
6122 else
6123 ei_next (&ei);
6127 /* We're done expanding trees to RTL. */
6128 currently_expanding_to_rtl = 0;
6130 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6131 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6133 edge e;
6134 edge_iterator ei;
6135 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6137 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6138 e->flags &= ~EDGE_EXECUTABLE;
6140 /* At the moment not all abnormal edges match the RTL
6141 representation. It is safe to remove them here as
6142 find_many_sub_basic_blocks will rediscover them.
6143 In the future we should get this fixed properly. */
6144 if ((e->flags & EDGE_ABNORMAL)
6145 && !(e->flags & EDGE_SIBCALL))
6146 remove_edge (e);
6147 else
6148 ei_next (&ei);
6152 blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
6153 bitmap_ones (blocks);
6154 find_many_sub_basic_blocks (blocks);
6155 sbitmap_free (blocks);
6156 purge_all_dead_edges ();
6158 expand_stack_alignment ();
6160 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6161 function. */
6162 if (crtl->tail_call_emit)
6163 fixup_tail_calls ();
6165 /* After initial rtl generation, call back to finish generating
6166 exception support code. We need to do this before cleaning up
6167 the CFG as the code does not expect dead landing pads. */
6168 if (fun->eh->region_tree != NULL)
6169 finish_eh_generation ();
6171 /* Remove unreachable blocks, otherwise we cannot compute dominators
6172 which are needed for loop state verification. As a side-effect
6173 this also compacts blocks.
6174 ??? We cannot remove trivially dead insns here as for example
6175 the DRAP reg on i?86 is not magically live at this point.
6176 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6177 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6179 #ifdef ENABLE_CHECKING
6180 verify_flow_info ();
6181 #endif
6183 /* Initialize pseudos allocated for hard registers. */
6184 emit_initial_value_sets ();
6186 /* And finally unshare all RTL. */
6187 unshare_all_rtl ();
6189 /* There's no need to defer outputting this function any more; we
6190 know we want to output it. */
6191 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6193 /* Now that we're done expanding trees to RTL, we shouldn't have any
6194 more CONCATs anywhere. */
6195 generating_concat_p = 0;
6197 if (dump_file)
6199 fprintf (dump_file,
6200 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6201 /* And the pass manager will dump RTL for us. */
6204 /* If we're emitting a nested function, make sure its parent gets
6205 emitted as well. Doing otherwise confuses debug info. */
6207 tree parent;
6208 for (parent = DECL_CONTEXT (current_function_decl);
6209 parent != NULL_TREE;
6210 parent = get_containing_scope (parent))
6211 if (TREE_CODE (parent) == FUNCTION_DECL)
6212 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6215 /* We are now committed to emitting code for this function. Do any
6216 preparation, such as emitting abstract debug info for the inline
6217 before it gets mangled by optimization. */
6218 if (cgraph_function_possibly_inlined_p (current_function_decl))
6219 (*debug_hooks->outlining_inline_function) (current_function_decl);
6221 TREE_ASM_WRITTEN (current_function_decl) = 1;
6223 /* After expanding, the return labels are no longer needed. */
6224 return_label = NULL;
6225 naked_return_label = NULL;
6227 /* After expanding, the tm_restart map is no longer needed. */
6228 if (fun->gimple_df->tm_restart)
6229 fun->gimple_df->tm_restart = NULL;
6231 /* Tag the blocks with a depth number so that change_scope can find
6232 the common parent easily. */
6233 set_block_levels (DECL_INITIAL (fun->decl), 0);
6234 default_rtl_profile ();
6236 timevar_pop (TV_POST_EXPAND);
6238 return 0;
6241 } // anon namespace
6243 rtl_opt_pass *
6244 make_pass_expand (gcc::context *ctxt)
6246 return new pass_expand (ctxt);