gcc/cfgexpand.c
/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stringpool.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "tm_p.h"
#include "predict.h"
#include "hashtab.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "insn-codes.h"
#include "optabs.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "expr.h"
#include "langhooks.h"
#include "bitmap.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "except.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "sbitmap.h"
#include "cfgloop.h"
#include "regs.h" /* For reg_renumber.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "asan.h"
#include "tree-ssa-address.h"
#include "recog.h"
#include "output.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
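
/* Worked example (illustrative, not part of the original source): with
   FRAME_GROWS_DOWNWARD, frame_phase == 0 and frame_offset == -20, a
   request for SIZE == 12 at ALIGN == 16 computes
     new_frame_offset = -20 - 12 = -32;  -32 & -16 == -32;
   so the slot is placed at frame offset -32 and frame_offset becomes
   -32.  */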
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
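
/* Note (illustrative, not part of the original source): the 3/2 growth
   policy above means the array grows as 32, 48, 72, 108, ... so repeated
   add_stack_var calls amortize to O(1) reallocations per variable.  */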
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we, for instance, moved an
     address-taken operation upward, without also moving a dereference
     to it upwards.  But it's conservatively correct as a variable can
     never hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
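
/* Example (illustrative, not part of the original source): for a diamond
   CFG  A -> {B, C} -> D  the loop above visits blocks in reverse
   post-order (A, B, C, D), unioning each block's computed liveness into
   its bb->aux bitmap, and repeats until no bitmap changes, i.e. until
   the usual forward-dataflow fixpoint is reached.  Back edges from loops
   are what can force a second iteration.  */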
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
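
/* Example (illustrative, not part of the original source): under this
   ordering a 64-byte variable with 32-byte alignment sorts before a
   64-byte variable with 8-byte alignment, which in turn sorts before any
   16-byte variable; "large"-aligned objects (alignment beyond
   MAX_SUPPORTED_STACK_ALIGNMENT) precede all of them.  */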
struct part_traits : default_hashmap_traits
{
  template<typename T>
    static bool
    is_deleted (T &e)
    { return e.m_value == reinterpret_cast<void *> (1); }

  template<typename T> static bool is_empty (T &e) { return e.m_value == NULL; }
  template<typename T>
    static void
    mark_deleted (T &e)
    { e.m_value = reinterpret_cast<T> (1); }

  template<typename T>
    static void
    mark_empty (T &e)
    { e.m_value = NULL; }
};

typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
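
/* Example (illustrative, not part of the original source): if partition A
   is the list  a -> x -> EOC  and B is the singleton  b -> EOC  (the
   gcc_assert above requires B to be a singleton), union_stack_vars (a, b)
   splices B in right after the representative, giving
   a -> b -> x -> EOC, with stack_vars[b].representative == a.  */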
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
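
/* Example (illustrative, not part of the original source) of the dump
   format produced above:

     Partition 2: size 48 align 16
        buf     tmp.3

   i.e. one header line per partition followed by a single line of its
   tab-separated member decls.  */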
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
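
/* Note (illustrative, not part of the original source): OFFSET & -OFFSET
   above extracts the largest power of two dividing OFFSET, e.g.
   24 & -24 == 8, so a slot at byte offset 24 is known to be 8-byte
   aligned relative to the base.  */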
struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is reversed: the highest-offset pairs come first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
            {
              HOST_WIDE_INT prev_offset = frame_offset;
              tree repr_decl = NULL_TREE;

              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));
              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          expand_one_stack_var_at (stack_vars[j].decl,
                                   base, base_align,
                                   offset);
        }
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
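
/* Examples (illustrative, not part of the original source): at -O0 a
   scalar smaller than PARAM_MIN_SIZE_FOR_STACK_SHARING is allocated
   immediately, while under -fstack-protector every variable is deferred
   so that vulnerable arrays can be re-ordered to the top of the
   frame.  */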
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  Similarly for
         SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        {
          expand_one_hard_reg_var (var);
          if (!DECL_HARD_REGISTER (var))
            /* Invalid register specification.  */
            expand_one_error_var (var);
        }
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3,
  SPCT_FLAG_EXPLICIT = 4
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY  1
#define SPCT_HAS_SMALL_CHAR_ARRAY  2
#define SPCT_HAS_ARRAY             4
#define SPCT_HAS_AGGREGATE         8
static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
            len = max;
          else
            len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG
      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
          && lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl))))
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
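
/* Examples (illustrative, not part of the original source): with
   -fstack-protector-all, "char buf[64]" classifies as a plain character
   array and gets phase 1 (closest to the guard), "int a[10]" has
   SPCT_HAS_ARRAY only and gets phase 2, and a scalar "int x" gets phase 0
   and is not segregated.  */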
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

/* A helper function that checks for the asan phase (with stack protector
   it is phase 3).  This is used as a callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}
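
/* Note (illustrative, not part of the original source): because this just
   sums sizes, a function with ten 8-byte locals is estimated at 80 bytes
   of frame even if the real allocation could share slots; per the comment
   above, that is acceptable for the inline heuristics.  */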
/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}

/* Check if the current function has local referenced variables that
   have their addresses taken, contain an array, or are arrays.  */

static bool
stack_protect_decl_p ()
{
  unsigned i;
  tree var;

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    if (!is_global_var (var))
      {
        tree var_type = TREE_TYPE (var);
        if (TREE_CODE (var) == VAR_DECL
            && (TREE_CODE (var_type) == ARRAY_TYPE
                || TREE_ADDRESSABLE (var)
                || (RECORD_OR_UNION_TYPE_P (var_type)
                    && record_or_union_type_has_array_p (var_type))))
          return true;
      }
  return false;
}
/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
         !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        /* This assumes that calls to internal-only functions never
           use a return slot.  */
        if (is_gimple_call (stmt)
            && !gimple_call_internal_p (stmt)
            && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
                                  gimple_call_fndecl (stmt)))
          return true;
      }
  return false;
}
1717 /* Expand all variables used in the function. */
1719 static rtx_insn *
1720 expand_used_vars (void)
1722 tree var, outer_block = DECL_INITIAL (current_function_decl);
1723 vec<tree> maybe_local_decls = vNULL;
1724 rtx_insn *var_end_seq = NULL;
1725 unsigned i;
1726 unsigned len;
1727 bool gen_stack_protect_signal = false;
1729 /* Compute the phase of the stack frame for this function. */
1731 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1732 int off = STARTING_FRAME_OFFSET % align;
1733 frame_phase = off ? align - off : 0;
1736 /* Set TREE_USED on all variables in the local_decls. */
1737 FOR_EACH_LOCAL_DECL (cfun, i, var)
1738 TREE_USED (var) = 1;
1739 /* Clear TREE_USED on all variables associated with a block scope. */
1740 clear_tree_used (DECL_INITIAL (current_function_decl));
1742 init_vars_expansion ();
1744 if (targetm.use_pseudo_pic_reg ())
1745 pic_offset_table_rtx = gen_reg_rtx (Pmode);
1747 hash_map<tree, tree> ssa_name_decls;
1748 for (i = 0; i < SA.map->num_partitions; i++)
1750 tree var = partition_to_var (SA.map, i);
1752 gcc_assert (!virtual_operand_p (var));
1754 /* Assign decls to each SSA name partition, share decls for partitions
1755 we could have coalesced (those with the same type). */
1756 if (SSA_NAME_VAR (var) == NULL_TREE)
1758 tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
1759 if (!*slot)
1760 *slot = create_tmp_reg (TREE_TYPE (var));
1761 replace_ssa_name_symbol (var, *slot);
1764 /* Always allocate space for partitions based on VAR_DECLs. But for
1765 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1766 debug info, there is no need to do so if optimization is disabled
1767 because all the SSA_NAMEs based on these DECLs have been coalesced
1768 into a single partition, which is thus assigned the canonical RTL
1769 location of the DECLs. If in_lto_p, we can't rely on optimize,
1770 a function could be compiled with -O1 -flto first and only the
1771 link performed at -O0. */
1772 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1773 expand_one_var (var, true, true);
1774 else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
1776 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1777 contain the default def (representing the parm or result itself)
1778 we don't do anything here. But those which don't contain the
1779 default def (representing a temporary based on the parm/result)
1780 we need to allocate space just like for normal VAR_DECLs. */
1781 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1783 expand_one_var (var, true, true);
1784 gcc_assert (SA.partition_to_pseudo[i]);
1789 if (flag_stack_protect == SPCT_FLAG_STRONG)
1790 gen_stack_protect_signal
1791 = stack_protect_decl_p () || stack_protect_return_slot_p ();
1793 /* At this point all variables on the local_decls with TREE_USED
1794 set are not associated with any block scope. Lay them out. */
1796 len = vec_safe_length (cfun->local_decls);
1797 FOR_EACH_LOCAL_DECL (cfun, i, var)
1799 bool expand_now = false;
1801 /* Expanded above already. */
1802 if (is_gimple_reg (var))
1804 TREE_USED (var) = 0;
1805 goto next;
1807 /* We didn't set a block for static or extern because it's hard
1808 to tell the difference between a global variable (re)declared
1809 in a local scope, and one that's really declared there to
1810 begin with. And it doesn't really matter much, since we're
1811 not giving them stack space. Expand them now. */
1812 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1813 expand_now = true;
1815 /* Expand variables not associated with any block now. Those created by
1816 the optimizers could be live anywhere in the function. Those that
1817 could possibly have been scoped originally and detached from their
1818 block will have their allocation deferred so we coalesce them with
1819 others when optimization is enabled. */
1820 else if (TREE_USED (var))
1821 expand_now = true;
1823 /* Finally, mark all variables on the list as used. We'll use
1824 this in a moment when we expand those associated with scopes. */
1825 TREE_USED (var) = 1;
1827 if (expand_now)
1828 expand_one_var (var, true, true);
1830 next:
1831 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1833 rtx rtl = DECL_RTL_IF_SET (var);
1835 /* Keep artificial non-ignored vars in cfun->local_decls
1836 chain until instantiate_decls. */
1837 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1838 add_local_decl (cfun, var);
1839 else if (rtl == NULL_RTX)
1840 /* If rtl isn't set yet, which can happen e.g. with
1841 -fstack-protector, retry before returning from this
1842 function. */
1843 maybe_local_decls.safe_push (var);
1847 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1849 +-----------------+-----------------+
1850 | ...processed... | ...duplicates...|
1851 +-----------------+-----------------+
1853 +-- LEN points here.
1855 We just want the duplicates, as those are the artificial
1856 non-ignored vars that we want to keep until instantiate_decls.
1857 Move them down and truncate the array. */
1858 if (!vec_safe_is_empty (cfun->local_decls))
1859 cfun->local_decls->block_remove (0, len);
1861 /* At this point, all variables within the block tree with TREE_USED
1862 set are actually used by the optimized function. Lay them out. */
1863 expand_used_vars_for_block (outer_block, true);
1865 if (stack_vars_num > 0)
1867 add_scope_conflicts ();
1869 /* If stack protection is enabled, we don't share space between
1870 vulnerable data and non-vulnerable data. */
1871 if (flag_stack_protect != 0
1872 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
1873 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1874 && lookup_attribute ("stack_protect",
1875 DECL_ATTRIBUTES (current_function_decl)))))
1876 add_stack_protection_conflicts ();
1878 /* Now that we have collected all stack variables, and have computed a
1879 minimal interference graph, attempt to save some stack space. */
1880 partition_stack_vars ();
1881 if (dump_file)
1882 dump_stack_var_partition ();
1885 switch (flag_stack_protect)
1887 case SPCT_FLAG_ALL:
1888 create_stack_guard ();
1889 break;
1891 case SPCT_FLAG_STRONG:
1892 if (gen_stack_protect_signal
1893 || cfun->calls_alloca || has_protected_decls
1894 || lookup_attribute ("stack_protect",
1895 DECL_ATTRIBUTES (current_function_decl)))
1896 create_stack_guard ();
1897 break;
1899 case SPCT_FLAG_DEFAULT:
1900 if (cfun->calls_alloca || has_protected_decls
1901 || lookup_attribute ("stack_protect",
1902 DECL_ATTRIBUTES (current_function_decl)))
1903 create_stack_guard ();
1904 break;
1906 case SPCT_FLAG_EXPLICIT:
1907 if (lookup_attribute ("stack_protect",
1908 DECL_ATTRIBUTES (current_function_decl)))
1909 create_stack_guard ();
1910 break;
1911 default:
1915 /* Assign rtl to each variable based on these partitions. */
1916 if (stack_vars_num > 0)
1918 struct stack_vars_data data;
1920 data.asan_vec = vNULL;
1921 data.asan_decl_vec = vNULL;
1922 data.asan_base = NULL_RTX;
1923 data.asan_alignb = 0;
1925 /* Reorder decls to be protected by iterating over the variables
1926 array multiple times, and allocating out of each phase in turn. */
1927 /* ??? We could probably integrate this into the qsort we did
1928 earlier, such that we naturally see these variables first,
1929 and thus naturally allocate things in the right order. */
1930 if (has_protected_decls)
1932 /* Phase 1 contains only character arrays. */
1933 expand_stack_vars (stack_protect_decl_phase_1, &data);
1935 /* Phase 2 contains other kinds of arrays. */
1936 if (flag_stack_protect == SPCT_FLAG_ALL
1937 || flag_stack_protect == SPCT_FLAG_STRONG
1938 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
1939 && lookup_attribute ("stack_protect",
1940 DECL_ATTRIBUTES (current_function_decl))))
1941 expand_stack_vars (stack_protect_decl_phase_2, &data);
1944 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
1945 /* Phase 3, any partitions that need asan protection
1946 in addition to phases 1 and 2. */
1947 expand_stack_vars (asan_decl_phase_3, &data);
1949 if (!data.asan_vec.is_empty ())
1951 HOST_WIDE_INT prev_offset = frame_offset;
1952 HOST_WIDE_INT offset, sz, redzonesz;
1953 redzonesz = ASAN_RED_ZONE_SIZE;
1954 sz = data.asan_vec[0] - prev_offset;
1955 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
1956 && data.asan_alignb <= 4096
1957 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
1958 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
1959 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
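/* Worked example, assuming ASAN_RED_ZONE_SIZE == 32: for sz == 40 and
   data.asan_alignb == 64, redzonesz = ((40 + 32 + 63) & ~63) - 40
   = 128 - 40 = 88, so the variable plus its red zone together end on
   a 64-byte boundary.  */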
1960 offset
1961 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
1962 data.asan_vec.safe_push (prev_offset);
1963 data.asan_vec.safe_push (offset);
1964 /* Leave space for alignment if STRICT_ALIGNMENT. */
1965 if (STRICT_ALIGNMENT)
1966 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
1967 << ASAN_SHADOW_SHIFT)
1968 / BITS_PER_UNIT, 1);
1970 var_end_seq
1971 = asan_emit_stack_protection (virtual_stack_vars_rtx,
1972 data.asan_base,
1973 data.asan_alignb,
1974 data.asan_vec.address (),
1975 data.asan_decl_vec.address (),
1976 data.asan_vec.length ());
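/* asan_emit_stack_protection emits the shadow-memory poisoning for
   this frame into the current sequence and hands back the matching
   unpoisoning insns; they are kept in VAR_END_SEQ so the caller can
   emit them on the function's return path.  */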
1979 expand_stack_vars (NULL, &data);
1981 data.asan_vec.release ();
1982 data.asan_decl_vec.release ();
1985 fini_vars_expansion ();
1987 /* If there were any artificial non-ignored vars without rtl
1988 found earlier, see whether deferred stack allocation has since
1989 assigned rtl to them. */
1990 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
1992 rtx rtl = DECL_RTL_IF_SET (var);
1994 /* Keep artificial non-ignored vars in cfun->local_decls
1995 chain until instantiate_decls. */
1996 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1997 add_local_decl (cfun, var);
1999 maybe_local_decls.release ();
2001 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2002 if (STACK_ALIGNMENT_NEEDED)
2004 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2005 if (!FRAME_GROWS_DOWNWARD)
2006 frame_offset += align - 1;
2007 frame_offset &= -align;
2010 return var_end_seq;
2014 /* If we need to produce a detailed dump, print the tree representation
2015 for STMT to the dump file. SINCE is the last RTX after which the RTL
2016 generated for STMT should have been appended. */
2018 static void
2019 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
2021 if (dump_file && (dump_flags & TDF_DETAILS))
2023 fprintf (dump_file, "\n;; ");
2024 print_gimple_stmt (dump_file, stmt, 0,
2025 TDF_SLIM | (dump_flags & TDF_LINENO));
2026 fprintf (dump_file, "\n");
2028 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2032 /* Maps the blocks that do not contain tree labels to rtx labels. */
2034 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2036 /* Returns the label_rtx expression for a label starting basic block BB. */
2038 static rtx
2039 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2041 gimple_stmt_iterator gsi;
2042 tree lab;
2044 if (bb->flags & BB_RTL)
2045 return block_label (bb);
2047 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2048 if (elt)
2049 return *elt;
2051 /* Find the tree label if it is present. */
2053 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2055 glabel *lab_stmt;
2057 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
2058 if (!lab_stmt)
2059 break;
2061 lab = gimple_label_label (lab_stmt);
2062 if (DECL_NONLOCAL (lab))
2063 break;
2065 return label_rtx (lab);
2068 rtx_code_label *l = gen_label_rtx ();
2069 lab_rtx_for_bb->put (bb, l);
2070 return l;
2074 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2075 of a basic block where we just expanded the conditional at the end,
2076 possibly clean up the CFG and instruction sequence. LAST is the
2077 last instruction before the just emitted jump sequence. */
2079 static void
2080 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2082 /* Special case: when jumpif decides that the condition is
2083 trivial it emits an unconditional jump (and the necessary
2084 barrier). But we still have two edges, the fallthru one is
2085 wrong. purge_dead_edges would clean this up later. Unfortunately
2086 we have to insert insns (and split edges) before
2087 find_many_sub_basic_blocks and hence before purge_dead_edges.
2088 But splitting edges might create new blocks which depend on the
2089 fact that if there are two edges there's no barrier. So the
2090 barrier would get lost and verify_flow_info would ICE. Instead
2091 of auditing all edge splitters to care for the barrier (which
2092 normally isn't there in a cleaned CFG), fix it here. */
2093 if (BARRIER_P (get_last_insn ()))
2095 rtx_insn *insn;
2096 remove_edge (e);
2097 /* Now we have a single successor block. If we have insns to
2098 insert on the remaining edge, we will potentially insert
2099 them at the end of this block (if the dest block isn't feasible)
2100 in order to avoid splitting the edge. This insertion will take
2101 place in front of the last jump. But we might have emitted
2102 multiple jumps (conditional and one unconditional) to the
2103 same destination. Inserting in front of the last one then
2104 is a problem. See PR 40021. We fix this by deleting all
2105 jumps except the last unconditional one. */
2106 insn = PREV_INSN (get_last_insn ());
2107 /* Make sure we have an unconditional jump. Otherwise we're
2108 confused. */
2109 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2110 for (insn = PREV_INSN (insn); insn != last;)
2112 insn = PREV_INSN (insn);
2113 if (JUMP_P (NEXT_INSN (insn)))
2115 if (!any_condjump_p (NEXT_INSN (insn)))
2117 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2118 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2120 delete_insn (NEXT_INSN (insn));
2126 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2127 Returns a new basic block if we've terminated the current basic
2128 block and created a new one. */
2130 static basic_block
2131 expand_gimple_cond (basic_block bb, gcond *stmt)
2133 basic_block new_bb, dest;
2134 edge new_edge;
2135 edge true_edge;
2136 edge false_edge;
2137 rtx_insn *last2, *last;
2138 enum tree_code code;
2139 tree op0, op1;
2141 code = gimple_cond_code (stmt);
2142 op0 = gimple_cond_lhs (stmt);
2143 op1 = gimple_cond_rhs (stmt);
2144 /* We're sometimes presented with such code:
2145 D.123_1 = x < y;
2146 if (D.123_1 != 0)
2148 This would expand to two comparisons which then later might
2149 be cleaned up by combine. But some pattern matchers like if-conversion
2150 work better when there's only one compare, so make up for this
2151 here as a special exception if TER would have made the same change. */
2152 if (SA.values
2153 && TREE_CODE (op0) == SSA_NAME
2154 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2155 && TREE_CODE (op1) == INTEGER_CST
2156 && ((gimple_cond_code (stmt) == NE_EXPR
2157 && integer_zerop (op1))
2158 || (gimple_cond_code (stmt) == EQ_EXPR
2159 && integer_onep (op1)))
2160 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2162 gimple second = SSA_NAME_DEF_STMT (op0);
2163 if (gimple_code (second) == GIMPLE_ASSIGN)
2165 enum tree_code code2 = gimple_assign_rhs_code (second);
2166 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2168 code = code2;
2169 op0 = gimple_assign_rhs1 (second);
2170 op1 = gimple_assign_rhs2 (second);
2172 /* If jumps are cheap and the target does not support conditional
2173 compare, turn some more codes into jumpy sequences. */
2174 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2175 && targetm.gen_ccmp_first == NULL)
2177 if ((code2 == BIT_AND_EXPR
2178 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2179 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2180 || code2 == TRUTH_AND_EXPR)
2182 code = TRUTH_ANDIF_EXPR;
2183 op0 = gimple_assign_rhs1 (second);
2184 op1 = gimple_assign_rhs2 (second);
2186 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2188 code = TRUTH_ORIF_EXPR;
2189 op0 = gimple_assign_rhs1 (second);
2190 op1 = gimple_assign_rhs2 (second);
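/* Illustrative GIMPLE (made up for this comment): with one-bit
   operands,
       tmp_4 = a_2 & b_3;
       if (tmp_4 != 0) goto L;
   is treated as TRUTH_ANDIF_EXPR and expanded as the jumpy
   short-circuit form  if (a_2) if (b_3) goto L;  when branches are
   cheap and the target has no conditional-compare pattern.  */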
2196 last2 = last = get_last_insn ();
2198 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2199 set_curr_insn_location (gimple_location (stmt));
2201 /* These flags have no purpose in RTL land. */
2202 true_edge->flags &= ~EDGE_TRUE_VALUE;
2203 false_edge->flags &= ~EDGE_FALSE_VALUE;
2205 /* We can either have a pure conditional jump with one fallthru edge or
2206 two-way jump that needs to be decomposed into two basic blocks. */
2207 if (false_edge->dest == bb->next_bb)
2209 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2210 true_edge->probability);
2211 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2212 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2213 set_curr_insn_location (true_edge->goto_locus);
2214 false_edge->flags |= EDGE_FALLTHRU;
2215 maybe_cleanup_end_of_block (false_edge, last);
2216 return NULL;
2218 if (true_edge->dest == bb->next_bb)
2220 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2221 false_edge->probability);
2222 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2223 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2224 set_curr_insn_location (false_edge->goto_locus);
2225 true_edge->flags |= EDGE_FALLTHRU;
2226 maybe_cleanup_end_of_block (true_edge, last);
2227 return NULL;
2230 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2231 true_edge->probability);
2232 last = get_last_insn ();
2233 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2234 set_curr_insn_location (false_edge->goto_locus);
2235 emit_jump (label_rtx_for_bb (false_edge->dest));
2237 BB_END (bb) = last;
2238 if (BARRIER_P (BB_END (bb)))
2239 BB_END (bb) = PREV_INSN (BB_END (bb));
2240 update_bb_for_insn (bb);
2242 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2243 dest = false_edge->dest;
2244 redirect_edge_succ (false_edge, new_bb);
2245 false_edge->flags |= EDGE_FALLTHRU;
2246 new_bb->count = false_edge->count;
2247 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2248 add_bb_to_loop (new_bb, bb->loop_father);
2249 new_edge = make_edge (new_bb, dest, 0);
2250 new_edge->probability = REG_BR_PROB_BASE;
2251 new_edge->count = new_bb->count;
2252 if (BARRIER_P (BB_END (new_bb)))
2253 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2254 update_bb_for_insn (new_bb);
2256 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2258 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2260 set_curr_insn_location (true_edge->goto_locus);
2261 true_edge->goto_locus = curr_insn_location ();
2264 return new_bb;
2267 /* Mark all calls that can have a transaction restart. */
2269 static void
2270 mark_transaction_restart_calls (gimple stmt)
2272 struct tm_restart_node dummy;
2273 tm_restart_node **slot;
2275 if (!cfun->gimple_df->tm_restart)
2276 return;
2278 dummy.stmt = stmt;
2279 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
2280 if (slot)
2282 struct tm_restart_node *n = *slot;
2283 tree list = n->label_or_list;
2284 rtx_insn *insn;
2286 for (insn = next_real_insn (get_last_insn ());
2287 !CALL_P (insn);
2288 insn = next_real_insn (insn))
2289 continue;
2291 if (TREE_CODE (list) == LABEL_DECL)
2292 add_reg_note (insn, REG_TM, label_rtx (list));
2293 else
2294 for (; list ; list = TREE_CHAIN (list))
2295 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2299 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2300 statement STMT. */
2302 static void
2303 expand_call_stmt (gcall *stmt)
2305 tree exp, decl, lhs;
2306 bool builtin_p;
2307 size_t i;
2309 if (gimple_call_internal_p (stmt))
2311 expand_internal_call (stmt);
2312 return;
2315 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
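/* The "+ 3" covers the fixed CALL_EXPR operands that precede the
   arguments: the operand count itself, CALL_EXPR_FN and
   CALL_EXPR_STATIC_CHAIN; the actual arguments start at operand 3.  */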
2317 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2318 decl = gimple_call_fndecl (stmt);
2319 builtin_p = decl && DECL_BUILT_IN (decl);
2321 /* If this is not a builtin function, the function type through which the
2322 call is made may be different from the type of the function. */
2323 if (!builtin_p)
2324 CALL_EXPR_FN (exp)
2325 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2326 CALL_EXPR_FN (exp));
2328 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2329 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2331 for (i = 0; i < gimple_call_num_args (stmt); i++)
2333 tree arg = gimple_call_arg (stmt, i);
2334 gimple def;
2335 /* TER (temporary expression replacement) forwards addresses into the
2336 arguments of builtins so we can infer better alignment. See PR39954. */
2337 if (builtin_p
2338 && TREE_CODE (arg) == SSA_NAME
2339 && (def = get_gimple_for_ssa_name (arg))
2340 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2341 arg = gimple_assign_rhs1 (def);
2342 CALL_EXPR_ARG (exp, i) = arg;
2345 if (gimple_has_side_effects (stmt))
2346 TREE_SIDE_EFFECTS (exp) = 1;
2348 if (gimple_call_nothrow_p (stmt))
2349 TREE_NOTHROW (exp) = 1;
2351 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2352 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2353 if (decl
2354 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2355 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2356 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2357 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2358 else
2359 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2360 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2361 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2362 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2364 /* Ensure RTL is created for debug args. */
2365 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2367 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2368 unsigned int ix;
2369 tree dtemp;
2371 if (debug_args)
2372 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2374 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2375 expand_debug_expr (dtemp);
2379 lhs = gimple_call_lhs (stmt);
2380 if (lhs)
2381 expand_assignment (lhs, exp, false);
2382 else
2383 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2385 mark_transaction_restart_calls (stmt);
2389 /* Generate RTL for an asm statement (explicit assembler code).
2390 STRING is a STRING_CST node containing the assembler code text,
2391 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2392 insn is volatile; don't optimize it. */
2394 static void
2395 expand_asm_loc (tree string, int vol, location_t locus)
2397 rtx body;
2399 if (TREE_CODE (string) == ADDR_EXPR)
2400 string = TREE_OPERAND (string, 0);
2402 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2403 ggc_strdup (TREE_STRING_POINTER (string)),
2404 locus);
2406 MEM_VOLATILE_P (body) = vol;
2408 emit_insn (body);
2411 /* Return the number of times character C occurs in string S. */
2412 static int
2413 n_occurrences (int c, const char *s)
2415 int n = 0;
2416 while (*s)
2417 n += (*s++ == c);
2418 return n;
2421 /* A subroutine of expand_asm_operands. Check that all operands have
2422 the same number of alternatives. Return true if so. */
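/* E.g. an output constraint "=r,m" and an input constraint "r,o" each
   contain one ',' and hence describe two alternatives; pairing either
   with a plain one-alternative "r" operand is rejected below.  */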
2424 static bool
2425 check_operand_nalternatives (tree outputs, tree inputs)
2427 if (outputs || inputs)
2429 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2430 int nalternatives
2431 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2432 tree next = inputs;
2434 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2436 error ("too many alternatives in %<asm%>");
2437 return false;
2440 tmp = outputs;
2441 while (tmp)
2443 const char *constraint
2444 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2446 if (n_occurrences (',', constraint) != nalternatives)
2448 error ("operand constraints for %<asm%> differ "
2449 "in number of alternatives");
2450 return false;
2453 if (TREE_CHAIN (tmp))
2454 tmp = TREE_CHAIN (tmp);
2455 else
2456 tmp = next, next = 0;
2460 return true;
2463 /* Check for overlap between registers marked in CLOBBERED_REGS and
2464 anything inappropriate in T. Emit an error and return true for a
2465 conflicting register variable, false for ok. */
2467 static bool
2468 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2470 /* Conflicts between asm-declared register variables and the clobber
2471 list are not allowed. */
2472 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2474 if (overlap)
2476 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2477 DECL_NAME (overlap));
2479 /* Reset registerness to stop multiple errors emitted for a single
2480 variable. */
2481 DECL_REGISTER (overlap) = 0;
2482 return true;
2485 return false;
2488 /* Generate RTL for an asm statement with arguments.
2489 STRING is the instruction template.
2490 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2491 Each output or input has an expression in the TREE_VALUE and
2492 a tree list in TREE_PURPOSE which in turn contains a constraint
2493 name in TREE_PURPOSE (or NULL_TREE) and a constraint string
2494 in TREE_VALUE, matching the TREE_VALUE (TREE_PURPOSE (t))
accesses in the constraint-collection loops below.
2495 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2496 that is clobbered by this insn.
2498 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2499 should be the fallthru basic block of the asm goto.
2501 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2502 Some elements of OUTPUTS may be replaced with trees representing temporary
2503 values. The caller should copy those temporary values to the originally
2504 specified lvalues.
2506 VOL nonzero means the insn is volatile; don't optimize it. */
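/* As a sketch (hypothetical statement):
       asm ("mov %1, %0" : "=r" (x) : "r" (y))
   arrives here with OUTPUTS as a one-element list whose TREE_VALUE is
   'x' and whose TREE_PURPOSE carries the constraint string "=r";
   INPUTS likewise carries 'y' and "r".  */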
2508 static void
2509 expand_asm_operands (tree string, tree outputs, tree inputs,
2510 tree clobbers, tree labels, basic_block fallthru_bb,
2511 int vol, location_t locus)
2513 rtvec argvec, constraintvec, labelvec;
2514 rtx body;
2515 int ninputs = list_length (inputs);
2516 int noutputs = list_length (outputs);
2517 int nlabels = list_length (labels);
2518 int ninout;
2519 int nclobbers;
2520 HARD_REG_SET clobbered_regs;
2521 int clobber_conflict_found = 0;
2522 tree tail;
2523 tree t;
2524 int i;
2525 /* Vector of RTX's of evaluated output operands. */
2526 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2527 int *inout_opnum = XALLOCAVEC (int, noutputs);
2528 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2529 machine_mode *inout_mode = XALLOCAVEC (machine_mode, noutputs);
2530 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2531 int old_generating_concat_p = generating_concat_p;
2532 rtx_code_label *fallthru_label = NULL;
2534 /* An ASM with no outputs needs to be treated as volatile, for now. */
2535 if (noutputs == 0)
2536 vol = 1;
2538 if (! check_operand_nalternatives (outputs, inputs))
2539 return;
2541 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2543 /* Collect constraints. */
2544 i = 0;
2545 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2546 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2547 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2548 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2550 /* Sometimes we wish to automatically clobber registers across an asm.
2551 Case in point is when the i386 backend moved from cc0 to a hard reg --
2552 maintaining source-level compatibility means automatically clobbering
2553 the flags register. */
2554 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2556 /* Count the number of meaningful clobbered registers, ignoring what
2557 we would ignore later. */
2558 nclobbers = 0;
2559 CLEAR_HARD_REG_SET (clobbered_regs);
2560 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2562 const char *regname;
2563 int nregs;
2565 if (TREE_VALUE (tail) == error_mark_node)
2566 return;
2567 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2569 i = decode_reg_name_and_count (regname, &nregs);
2570 if (i == -4)
2571 ++nclobbers;
2572 else if (i == -2)
2573 error ("unknown register name %qs in %<asm%>", regname);
2575 /* Mark clobbered registers. */
2576 if (i >= 0)
2578 int reg;
2580 for (reg = i; reg < i + nregs; reg++)
2582 ++nclobbers;
2584 /* Clobbering the PIC register is an error. */
2585 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2587 error ("PIC register clobbered by %qs in %<asm%>", regname);
2588 return;
2591 SET_HARD_REG_BIT (clobbered_regs, reg);
2596 /* First pass over inputs and outputs checks validity and calls
2597 mark_addressable where needed. */
2599 ninout = 0;
2600 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2602 tree val = TREE_VALUE (tail);
2603 tree type = TREE_TYPE (val);
2604 const char *constraint;
2605 bool is_inout;
2606 bool allows_reg;
2607 bool allows_mem;
2609 /* If there's an erroneous arg, emit no insn. */
2610 if (type == error_mark_node)
2611 return;
2613 /* Try to parse the output constraint. If that fails, there's
2614 no point in going further. */
2615 constraint = constraints[i];
2616 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2617 &allows_mem, &allows_reg, &is_inout))
2618 return;
2620 if (! allows_reg
2621 && (allows_mem
2622 || is_inout
2623 || (DECL_P (val)
2624 && REG_P (DECL_RTL (val))
2625 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2626 mark_addressable (val);
2628 if (is_inout)
2629 ninout++;
2632 ninputs += ninout;
2633 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
2635 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2636 return;
2639 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2641 bool allows_reg, allows_mem;
2642 const char *constraint;
2644 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2645 would get VOIDmode and that could cause a crash in reload. */
2646 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2647 return;
2649 constraint = constraints[i + noutputs];
2650 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2651 constraints, &allows_mem, &allows_reg))
2652 return;
2654 if (! allows_reg && allows_mem)
2655 mark_addressable (TREE_VALUE (tail));
2658 /* Second pass evaluates arguments. */
2660 /* Make sure stack is consistent for asm goto. */
2661 if (nlabels > 0)
2662 do_pending_stack_adjust ();
2664 ninout = 0;
2665 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2667 tree val = TREE_VALUE (tail);
2668 tree type = TREE_TYPE (val);
2669 bool is_inout;
2670 bool allows_reg;
2671 bool allows_mem;
2672 rtx op;
2673 bool ok;
2675 ok = parse_output_constraint (&constraints[i], i, ninputs,
2676 noutputs, &allows_mem, &allows_reg,
2677 &is_inout);
2678 gcc_assert (ok);
2680 /* If an output operand is not a decl or indirect ref and our constraint
2681 allows a register, make a temporary to act as an intermediate.
2682 Make the asm insn write into that, then our caller will copy it to
2683 the real output operand. Likewise for promoted variables. */
2685 generating_concat_p = 0;
2687 real_output_rtx[i] = NULL_RTX;
2688 if ((TREE_CODE (val) == INDIRECT_REF
2689 && allows_mem)
2690 || (DECL_P (val)
2691 && (allows_mem || REG_P (DECL_RTL (val)))
2692 && ! (REG_P (DECL_RTL (val))
2693 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2694 || ! allows_reg
2695 || is_inout)
2697 op = expand_expr (val, NULL_RTX, VOIDmode,
2698 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2699 if (MEM_P (op))
2700 op = validize_mem (op);
2702 if (! allows_reg && !MEM_P (op))
2703 error ("output number %d not directly addressable", i);
2704 if ((! allows_mem && MEM_P (op))
2705 || GET_CODE (op) == CONCAT)
2707 real_output_rtx[i] = op;
2708 op = gen_reg_rtx (GET_MODE (op));
2709 if (is_inout)
2710 emit_move_insn (op, real_output_rtx[i]);
2713 else
2715 op = assign_temp (type, 0, 1);
2716 op = validize_mem (op);
2717 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2718 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2719 TREE_VALUE (tail) = make_tree (type, op);
2721 output_rtx[i] = op;
2723 generating_concat_p = old_generating_concat_p;
2725 if (is_inout)
2727 inout_mode[ninout] = TYPE_MODE (type);
2728 inout_opnum[ninout++] = i;
2731 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2732 clobber_conflict_found = 1;
2735 /* Make vectors for the expression-rtx, constraint strings,
2736 and named operands. */
2738 argvec = rtvec_alloc (ninputs);
2739 constraintvec = rtvec_alloc (ninputs);
2740 labelvec = rtvec_alloc (nlabels);
2742 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2743 : GET_MODE (output_rtx[0])),
2744 ggc_strdup (TREE_STRING_POINTER (string)),
2745 empty_string, 0, argvec, constraintvec,
2746 labelvec, locus);
2748 MEM_VOLATILE_P (body) = vol;
2750 /* Eval the inputs and put them into ARGVEC.
2751 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2753 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2755 bool allows_reg, allows_mem;
2756 const char *constraint;
2757 tree val, type;
2758 rtx op;
2759 bool ok;
2761 constraint = constraints[i + noutputs];
2762 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2763 constraints, &allows_mem, &allows_reg);
2764 gcc_assert (ok);
2766 generating_concat_p = 0;
2768 val = TREE_VALUE (tail);
2769 type = TREE_TYPE (val);
2770 /* EXPAND_INITIALIZER will not generate code for valid initializer
2771 constants, but will still generate code for other types of operand.
2772 This is the behavior we want for constant constraints. */
2773 op = expand_expr (val, NULL_RTX, VOIDmode,
2774 allows_reg ? EXPAND_NORMAL
2775 : allows_mem ? EXPAND_MEMORY
2776 : EXPAND_INITIALIZER);
2778 /* Never pass a CONCAT to an ASM. */
2779 if (GET_CODE (op) == CONCAT)
2780 op = force_reg (GET_MODE (op), op);
2781 else if (MEM_P (op))
2782 op = validize_mem (op);
2784 if (asm_operand_ok (op, constraint, NULL) <= 0)
2786 if (allows_reg && TYPE_MODE (type) != BLKmode)
2787 op = force_reg (TYPE_MODE (type), op);
2788 else if (!allows_mem)
2789 warning (0, "asm operand %d probably doesn%'t match constraints",
2790 i + noutputs);
2791 else if (MEM_P (op))
2793 /* We won't recognize either volatile memory or memory
2794 with a queued address as an available memory_operand
2795 at this point. Ignore it: clearly this *is* a memory. */
2797 else
2798 gcc_unreachable ();
2801 generating_concat_p = old_generating_concat_p;
2802 ASM_OPERANDS_INPUT (body, i) = op;
2804 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2805 = gen_rtx_ASM_INPUT_loc (TYPE_MODE (type),
2806 ggc_strdup (constraints[i + noutputs]),
2807 locus);
2809 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2810 clobber_conflict_found = 1;
2813 /* Protect all the operands from the queue now that they have all been
2814 evaluated. */
2816 generating_concat_p = 0;
2818 /* For in-out operands, copy output rtx to input rtx. */
2819 for (i = 0; i < ninout; i++)
2821 int j = inout_opnum[i];
2822 char buffer[16];
2824 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2825 = output_rtx[j];
2827 sprintf (buffer, "%d", j);
2828 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2829 = gen_rtx_ASM_INPUT_loc (inout_mode[i], ggc_strdup (buffer), locus);
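/* E.g. (hypothetical):  asm ("incl %0" : "+r" (x))  makes x output
   operand 0 plus a hidden input whose constraint becomes the string
   "0", a matching constraint tying the input to the output's rtx.  */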
2832 /* Copy labels to the vector. */
2833 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2835 rtx r;
2836 /* If asm goto has any labels in the fallthru basic block, use
2837 a label that we emit immediately after the asm goto. Expansion
2838 may insert further instructions into the same basic block after
2839 asm goto and if we don't do this, insertion of instructions on
2840 the fallthru edge might misbehave. See PR58670. */
2841 if (fallthru_bb
2842 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2844 if (fallthru_label == NULL_RTX)
2845 fallthru_label = gen_label_rtx ();
2846 r = fallthru_label;
2848 else
2849 r = label_rtx (TREE_VALUE (tail));
2850 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2853 generating_concat_p = old_generating_concat_p;
2855 /* Now, for each output, construct an rtx
2856 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2857 ARGVEC CONSTRAINTS OPNAMES))
2858 If there is more than one, put them inside a PARALLEL. */
2860 if (nlabels > 0 && nclobbers == 0)
2862 gcc_assert (noutputs == 0);
2863 emit_jump_insn (body);
2865 else if (noutputs == 0 && nclobbers == 0)
2867 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2868 emit_insn (body);
2870 else if (noutputs == 1 && nclobbers == 0)
2872 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2873 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2875 else
2877 rtx obody = body;
2878 int num = noutputs;
2880 if (num == 0)
2881 num = 1;
2883 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2885 /* For each output operand, store a SET. */
2886 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2888 XVECEXP (body, 0, i)
2889 = gen_rtx_SET (VOIDmode,
2890 output_rtx[i],
2891 gen_rtx_ASM_OPERANDS
2892 (GET_MODE (output_rtx[i]),
2893 ggc_strdup (TREE_STRING_POINTER (string)),
2894 ggc_strdup (constraints[i]),
2895 i, argvec, constraintvec, labelvec, locus));
2897 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2900 /* If there are no outputs (but there are some clobbers)
2901 store the bare ASM_OPERANDS into the PARALLEL. */
2903 if (i == 0)
2904 XVECEXP (body, 0, i++) = obody;
2906 /* Store (clobber REG) for each clobbered register specified. */
2908 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2910 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2911 int reg, nregs;
2912 int j = decode_reg_name_and_count (regname, &nregs);
2913 rtx clobbered_reg;
2915 if (j < 0)
2917 if (j == -3) /* `cc', which is not a register */
2918 continue;
2920 if (j == -4) /* `memory', don't cache memory across asm */
2922 XVECEXP (body, 0, i++)
2923 = gen_rtx_CLOBBER (VOIDmode,
2924 gen_rtx_MEM
2925 (BLKmode,
2926 gen_rtx_SCRATCH (VOIDmode)));
2927 continue;
2930 /* Ignore unknown register, error already signaled. */
2931 continue;
2934 for (reg = j; reg < j + nregs; reg++)
2936 /* Use QImode since that's guaranteed to clobber just
2937 one reg. */
2938 clobbered_reg = gen_rtx_REG (QImode, reg);
2940 /* Do sanity check for overlap between clobbers and
2941 respectively input and outputs that hasn't been
2942 handled. Such overlap should have been detected and
2943 reported above. */
2944 if (!clobber_conflict_found)
2946 int opno;
2948 /* We test the old body (obody) contents to avoid
2949 tripping over the under-construction body. */
2950 for (opno = 0; opno < noutputs; opno++)
2951 if (reg_overlap_mentioned_p (clobbered_reg,
2952 output_rtx[opno]))
2953 internal_error
2954 ("asm clobber conflict with output operand");
2956 for (opno = 0; opno < ninputs - ninout; opno++)
2957 if (reg_overlap_mentioned_p (clobbered_reg,
2958 ASM_OPERANDS_INPUT (obody,
2959 opno)))
2960 internal_error
2961 ("asm clobber conflict with input operand");
2964 XVECEXP (body, 0, i++)
2965 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2969 if (nlabels > 0)
2970 emit_jump_insn (body);
2971 else
2972 emit_insn (body);
2975 if (fallthru_label)
2976 emit_label (fallthru_label);
2978 /* For any outputs that needed reloading into registers, spill them
2979 back to where they belong. */
2980 for (i = 0; i < noutputs; ++i)
2981 if (real_output_rtx[i])
2982 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2984 crtl->has_asm_statement = 1;
2985 free_temp_slots ();
2989 static void
2990 expand_asm_stmt (gasm *stmt)
2992 int noutputs;
2993 tree outputs, tail, t;
2994 tree *o;
2995 size_t i, n;
2996 const char *s;
2997 tree str, out, in, cl, labels;
2998 location_t locus = gimple_location (stmt);
2999 basic_block fallthru_bb = NULL;
3001 /* Meh... convert the gimple asm operands into real tree lists.
3002 Eventually we should make all routines work on the vectors instead
3003 of relying on TREE_CHAIN. */
3004 out = NULL_TREE;
3005 n = gimple_asm_noutputs (stmt);
3006 if (n > 0)
3008 t = out = gimple_asm_output_op (stmt, 0);
3009 for (i = 1; i < n; i++)
3010 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
3013 in = NULL_TREE;
3014 n = gimple_asm_ninputs (stmt);
3015 if (n > 0)
3017 t = in = gimple_asm_input_op (stmt, 0);
3018 for (i = 1; i < n; i++)
3019 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
3022 cl = NULL_TREE;
3023 n = gimple_asm_nclobbers (stmt);
3024 if (n > 0)
3026 t = cl = gimple_asm_clobber_op (stmt, 0);
3027 for (i = 1; i < n; i++)
3028 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
3031 labels = NULL_TREE;
3032 n = gimple_asm_nlabels (stmt);
3033 if (n > 0)
3035 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
3036 if (fallthru)
3037 fallthru_bb = fallthru->dest;
3038 t = labels = gimple_asm_label_op (stmt, 0);
3039 for (i = 1; i < n; i++)
3040 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
3043 s = gimple_asm_string (stmt);
3044 str = build_string (strlen (s), s);
3046 if (gimple_asm_input_p (stmt))
3048 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
3049 return;
3052 outputs = out;
3053 noutputs = gimple_asm_noutputs (stmt);
3054 /* o[I] is the place where output number I should be written. */
3055 o = (tree *) alloca (noutputs * sizeof (tree));
3057 /* Record the contents of OUTPUTS before it is modified. */
3058 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3059 o[i] = TREE_VALUE (tail);
3061 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
3062 OUTPUTS some trees for where the values were actually stored. */
3063 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
3064 gimple_asm_volatile_p (stmt), locus);
3066 /* Copy all the intermediate outputs into the specified outputs. */
3067 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3069 if (o[i] != TREE_VALUE (tail))
3071 expand_assignment (o[i], TREE_VALUE (tail), false);
3072 free_temp_slots ();
3074 /* Restore the original value so that it's correct the next
3075 time we expand this function. */
3076 TREE_VALUE (tail) = o[i];
3081 /* Emit code to jump to the address
3082 specified by the pointer expression EXP. */
3084 static void
3085 expand_computed_goto (tree exp)
3087 rtx x = expand_normal (exp);
3089 do_pending_stack_adjust ();
3090 emit_indirect_jump (x);
3093 /* Generate RTL code for a `goto' statement with target label LABEL.
3094 LABEL should be a LABEL_DECL tree node that was or will later be
3095 defined with `expand_label'. */
3097 static void
3098 expand_goto (tree label)
3100 #ifdef ENABLE_CHECKING
3101 /* Check for a nonlocal goto to a containing function. Should have
3102 gotten translated to __builtin_nonlocal_goto. */
3103 tree context = decl_function_context (label);
3104 gcc_assert (!context || context == current_function_decl);
3105 #endif
3107 emit_jump (label_rtx (label));
3110 /* Output a return with no value. */
3112 static void
3113 expand_null_return_1 (void)
3115 clear_pending_stack_adjust ();
3116 do_pending_stack_adjust ();
3117 emit_jump (return_label);
3120 /* Generate RTL to return from the current function, with no value.
3121 (That is, we do not do anything about returning any value.) */
3123 void
3124 expand_null_return (void)
3126 /* If this function was declared to return a value, but we
3127 didn't, clobber the return registers so that they are not
3128 propagated live to the rest of the function. */
3129 clobber_return_register ();
3131 expand_null_return_1 ();
3134 /* Generate RTL to return from the current function, with value VAL. */
3136 static void
3137 expand_value_return (rtx val)
3139 /* Copy the value to the return location unless it's already there. */
3141 tree decl = DECL_RESULT (current_function_decl);
3142 rtx return_reg = DECL_RTL (decl);
3143 if (return_reg != val)
3145 tree funtype = TREE_TYPE (current_function_decl);
3146 tree type = TREE_TYPE (decl);
3147 int unsignedp = TYPE_UNSIGNED (type);
3148 machine_mode old_mode = DECL_MODE (decl);
3149 machine_mode mode;
3150 if (DECL_BY_REFERENCE (decl))
3151 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3152 else
3153 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3155 if (mode != old_mode)
3156 val = convert_modes (mode, old_mode, val, unsignedp);
3158 if (GET_CODE (return_reg) == PARALLEL)
3159 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3160 else
3161 emit_move_insn (return_reg, val);
3164 expand_null_return_1 ();
3167 /* Generate RTL to evaluate the expression RETVAL and return it
3168 from the current function. */
3170 static void
3171 expand_return (tree retval, tree bounds)
3173 rtx result_rtl;
3174 rtx val = 0;
3175 tree retval_rhs;
3176 rtx bounds_rtl;
3178 /* If function wants no value, give it none. */
3179 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3181 expand_normal (retval);
3182 expand_null_return ();
3183 return;
3186 if (retval == error_mark_node)
3188 /* Treat this like a return of no value from a function that
3189 returns a value. */
3190 expand_null_return ();
3191 return;
3193 else if ((TREE_CODE (retval) == MODIFY_EXPR
3194 || TREE_CODE (retval) == INIT_EXPR)
3195 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3196 retval_rhs = TREE_OPERAND (retval, 1);
3197 else
3198 retval_rhs = retval;
3200 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3202 /* Put returned bounds to the right place. */
3203 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3204 if (bounds_rtl)
3206 rtx addr, bnd;
3208 if (bounds)
3210 bnd = expand_normal (bounds);
3211 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3213 else if (REG_P (bounds_rtl))
3215 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3216 addr = gen_rtx_MEM (Pmode, addr);
3217 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3218 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3220 else
3222 int n;
3224 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3226 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3227 addr = gen_rtx_MEM (Pmode, addr);
3229 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3231 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3232 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3233 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3234 rtx bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3235 targetm.calls.store_returned_bounds (slot, bnd);
3239 else if (chkp_function_instrumented_p (current_function_decl)
3240 && !BOUNDED_P (retval_rhs)
3241 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3242 && TREE_CODE (retval_rhs) != RESULT_DECL)
3244 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3245 addr = gen_rtx_MEM (Pmode, addr);
3247 gcc_assert (MEM_P (result_rtl));
3249 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3252 /* If we are returning the RESULT_DECL, then the value has already
3253 been stored into it, so we don't have to do anything special. */
3254 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3255 expand_value_return (result_rtl);
3257 /* If the result is an aggregate that is being returned in one (or more)
3258 registers, load the registers here. */
3260 else if (retval_rhs != 0
3261 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3262 && REG_P (result_rtl))
3264 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3265 if (val)
3267 /* Use the mode of the result value on the return register. */
3268 PUT_MODE (result_rtl, GET_MODE (val));
3269 expand_value_return (val);
3271 else
3272 expand_null_return ();
3274 else if (retval_rhs != 0
3275 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3276 && (REG_P (result_rtl)
3277 || (GET_CODE (result_rtl) == PARALLEL)))
3279 /* Compute the return value into a temporary (usually a pseudo reg). */
3280 val
3281 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3282 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3283 val = force_not_mem (val);
3284 expand_value_return (val);
3286 else
3288 /* No hard reg used; calculate value into hard return reg. */
3289 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3290 expand_value_return (result_rtl);
3294 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3295 STMT that doesn't require special handling for outgoing edges. That
3296 is no tailcalls and no GIMPLE_COND. */
3298 static void
3299 expand_gimple_stmt_1 (gimple stmt)
3301 tree op0;
3303 set_curr_insn_location (gimple_location (stmt));
3305 switch (gimple_code (stmt))
3307 case GIMPLE_GOTO:
3308 op0 = gimple_goto_dest (stmt);
3309 if (TREE_CODE (op0) == LABEL_DECL)
3310 expand_goto (op0);
3311 else
3312 expand_computed_goto (op0);
3313 break;
3314 case GIMPLE_LABEL:
3315 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
3316 break;
3317 case GIMPLE_NOP:
3318 case GIMPLE_PREDICT:
3319 break;
3320 case GIMPLE_SWITCH:
3321 expand_case (as_a <gswitch *> (stmt));
3322 break;
3323 case GIMPLE_ASM:
3324 expand_asm_stmt (as_a <gasm *> (stmt));
3325 break;
3326 case GIMPLE_CALL:
3327 expand_call_stmt (as_a <gcall *> (stmt));
3328 break;
3330 case GIMPLE_RETURN:
3331 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3333 if (op0 && op0 != error_mark_node)
3335 tree result = DECL_RESULT (current_function_decl);
3337 /* If we are not returning the current function's RESULT_DECL,
3338 build an assignment to it. */
3339 if (op0 != result)
3341 /* I believe that a function's RESULT_DECL is unique. */
3342 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3344 /* ??? We'd like to use simply expand_assignment here,
3345 but this fails if the value is of BLKmode but the return
3346 decl is a register. expand_return has special handling
3347 for this combination, which eventually should move
3348 to common code. See comments there. Until then, let's
3349 build a modify expression :-/ */
3350 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3351 result, op0);
3354 if (!op0)
3355 expand_null_return ();
3356 else
3357 expand_return (op0, gimple_return_retbnd (stmt));
3358 break;
3360 case GIMPLE_ASSIGN:
3362 gassign *assign_stmt = as_a <gassign *> (stmt);
3363 tree lhs = gimple_assign_lhs (assign_stmt);
3365 /* Tree expand used to fiddle with |= and &= of two bitfield
3366 COMPONENT_REFs here. This can't happen with gimple, the LHS
3367 of binary assigns must be a gimple reg. */
3369 if (TREE_CODE (lhs) != SSA_NAME
3370 || get_gimple_rhs_class (gimple_expr_code (stmt))
3371 == GIMPLE_SINGLE_RHS)
3373 tree rhs = gimple_assign_rhs1 (assign_stmt);
3374 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3375 == GIMPLE_SINGLE_RHS);
3376 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3377 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3378 if (TREE_CLOBBER_P (rhs))
3379 /* This is a clobber to mark the going out of scope for
3380 this LHS. */
3382 else
3383 expand_assignment (lhs, rhs,
3384 gimple_assign_nontemporal_move_p (
3385 assign_stmt));
3387 else
3389 rtx target, temp;
3390 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3391 struct separate_ops ops;
3392 bool promoted = false;
3394 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3395 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3396 promoted = true;
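/* A promoted SUBREG means the variable lives in a wider register
   (PROMOTE_MODE targets) with its value kept sign- or zero-extended;
   we must not store into TARGET directly but go through SUBREG_REG
   via convert_move below to preserve that invariant.  */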
3398 ops.code = gimple_assign_rhs_code (assign_stmt);
3399 ops.type = TREE_TYPE (lhs);
3400 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3402 case GIMPLE_TERNARY_RHS:
3403 ops.op2 = gimple_assign_rhs3 (assign_stmt);
3404 /* Fallthru */
3405 case GIMPLE_BINARY_RHS:
3406 ops.op1 = gimple_assign_rhs2 (assign_stmt);
3407 /* Fallthru */
3408 case GIMPLE_UNARY_RHS:
3409 ops.op0 = gimple_assign_rhs1 (assign_stmt);
3410 break;
3411 default:
3412 gcc_unreachable ();
3414 ops.location = gimple_location (stmt);
3416 /* If we want to use a nontemporal store, force the value to
3417 register first. If we store into a promoted register,
3418 don't directly expand to target. */
3419 temp = nontemporal || promoted ? NULL_RTX : target;
3420 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3421 EXPAND_NORMAL);
3423 if (temp == target)
3425 else if (promoted)
3427 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3428 /* If TEMP is a VOIDmode constant, use convert_modes to make
3429 sure that we properly convert it. */
3430 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3432 temp = convert_modes (GET_MODE (target),
3433 TYPE_MODE (ops.type),
3434 temp, unsignedp);
3435 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3436 GET_MODE (target), temp, unsignedp);
3439 convert_move (SUBREG_REG (target), temp, unsignedp);
3441 else if (nontemporal && emit_storent_insn (target, temp))
3443 else
3445 temp = force_operand (temp, target);
3446 if (temp != target)
3447 emit_move_insn (target, temp);
3451 break;
3453 default:
3454 gcc_unreachable ();
3458 /* Expand one gimple statement STMT and return the last RTL instruction
3459 before any of the newly generated ones.
3461 In addition to generating the necessary RTL instructions this also
3462 sets REG_EH_REGION notes if necessary and sets the current source
3463 location for diagnostics. */
3465 static rtx_insn *
3466 expand_gimple_stmt (gimple stmt)
3468 location_t saved_location = input_location;
3469 rtx_insn *last = get_last_insn ();
3470 int lp_nr;
3472 gcc_assert (cfun);
3474 /* We need to save and restore the current source location so that errors
3475 discovered during expansion are emitted with the right location. But
3476 it would be better if the diagnostic routines used the source location
3477 embedded in the tree nodes rather than globals. */
3478 if (gimple_has_location (stmt))
3479 input_location = gimple_location (stmt);
3481 expand_gimple_stmt_1 (stmt);
3483 /* Free any temporaries used to evaluate this statement. */
3484 free_temp_slots ();
3486 input_location = saved_location;
3488 /* Mark all insns that may trap. */
3489 lp_nr = lookup_stmt_eh_lp (stmt);
3490 if (lp_nr)
3492 rtx_insn *insn;
3493 for (insn = next_real_insn (last); insn;
3494 insn = next_real_insn (insn))
3496 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3497 /* If we want exceptions for non-call insns, any
3498 may_trap_p instruction may throw. */
3499 && GET_CODE (PATTERN (insn)) != CLOBBER
3500 && GET_CODE (PATTERN (insn)) != USE
3501 && insn_could_throw_p (insn))
3502 make_reg_eh_region_note (insn, 0, lp_nr);
3506 return last;
3509 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3510 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3511 generated a tail call (something that might be denied by the ABI
3512 rules governing the call; see calls.c).
3514 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3515 can still reach the rest of BB. The case here is __builtin_sqrt,
3516 where the NaN result goes through the external function (with a
3517 tailcall) and the normal result happens via a sqrt instruction. */
3519 static basic_block
3520 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3522 rtx_insn *last2, *last;
3523 edge e;
3524 edge_iterator ei;
3525 int probability;
3526 gcov_type count;
3528 last2 = last = expand_gimple_stmt (stmt);
3530 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3531 if (CALL_P (last) && SIBLING_CALL_P (last))
3532 goto found;
3534 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3536 *can_fallthru = true;
3537 return NULL;
3539 found:
3540 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3541 Any instructions emitted here are about to be deleted. */
3542 do_pending_stack_adjust ();
3544 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3545 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3546 EH or abnormal edges, we shouldn't have created a tail call in
3547 the first place. So it seems to me we should just be removing
3548 all edges here, or redirecting the existing fallthru edge to
3549 the exit block. */
3551 probability = 0;
3552 count = 0;
3554 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3556 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3558 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3560 e->dest->count -= e->count;
3561 e->dest->frequency -= EDGE_FREQUENCY (e);
3562 if (e->dest->count < 0)
3563 e->dest->count = 0;
3564 if (e->dest->frequency < 0)
3565 e->dest->frequency = 0;
3567 count += e->count;
3568 probability += e->probability;
3569 remove_edge (e);
3571 else
3572 ei_next (&ei);
3575 /* This is somewhat ugly: the call_expr expander often emits instructions
3576 after the sibcall (to perform the function return). These confuse the
3577 find_many_sub_basic_blocks code, so we need to get rid of these. */
3578 last = NEXT_INSN (last);
3579 gcc_assert (BARRIER_P (last));
3581 *can_fallthru = false;
3582 while (NEXT_INSN (last))
3584 /* For instance, an sqrt builtin expander expands an if with a
3585 sibcall in the then-branch and a label for the else-branch. */
3586 if (LABEL_P (NEXT_INSN (last)))
3588 *can_fallthru = true;
3589 break;
3591 delete_insn (NEXT_INSN (last));
3594 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3595 | EDGE_SIBCALL);
3596 e->probability += probability;
3597 e->count += count;
3598 BB_END (bb) = last;
3599 update_bb_for_insn (bb);
3601 if (NEXT_INSN (last))
3603 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3605 last = BB_END (bb);
3606 if (BARRIER_P (last))
3607 BB_END (bb) = PREV_INSN (last);
3610 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3612 return bb;
3615 /* Return the difference between the floor and the truncated result of
3616 a signed division by OP1 with remainder MOD. */
3617 static rtx
3618 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3620 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3621 return gen_rtx_IF_THEN_ELSE
3622 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3623 gen_rtx_IF_THEN_ELSE
3624 (mode, gen_rtx_LT (BImode,
3625 gen_rtx_DIV (mode, op1, mod),
3626 const0_rtx),
3627 constm1_rtx, const0_rtx),
3628 const0_rtx);
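/* Worked example: -7 / 2 truncates to -3 with MOD == -1; OP1 / MOD
   == 2 / -1 < 0, so the adjustment is -1, and -3 + -1 == -4
   == floor (-7 / 2).  */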
3631 /* Return the difference between the ceil and the truncated result of
3632 a signed division by OP1 with remainder MOD. */
3633 static rtx
3634 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3636 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3637 return gen_rtx_IF_THEN_ELSE
3638 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3639 gen_rtx_IF_THEN_ELSE
3640 (mode, gen_rtx_GT (BImode,
3641 gen_rtx_DIV (mode, op1, mod),
3642 const0_rtx),
3643 const1_rtx, const0_rtx),
3644 const0_rtx);
3647 /* Return the difference between the ceil and the truncated result of
3648 an unsigned division by OP1 with remainder MOD. */
3649 static rtx
3650 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3652 /* (mod != 0 ? 1 : 0) */
3653 return gen_rtx_IF_THEN_ELSE
3654 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3655 const1_rtx, const0_rtx);
3658 /* Return the difference between the rounded and the truncated result
3659 of a signed division by OP1 with remainder MOD. Halfway cases are
3660 rounded away from zero, rather than to the nearest even number. */
3661 static rtx
3662 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3664 /* (abs (mod) >= abs (op1) - abs (mod)
3665 ? (op1 / mod > 0 ? 1 : -1)
3666 : 0) */
3667 return gen_rtx_IF_THEN_ELSE
3668 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3669 gen_rtx_MINUS (mode,
3670 gen_rtx_ABS (mode, op1),
3671 gen_rtx_ABS (mode, mod))),
3672 gen_rtx_IF_THEN_ELSE
3673 (mode, gen_rtx_GT (BImode,
3674 gen_rtx_DIV (mode, op1, mod),
3675 const0_rtx),
3676 const1_rtx, constm1_rtx),
3677 const0_rtx);
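/* Worked example: 7 / 2 truncates to 3 with MOD == 1; |MOD| >= |OP1|
   - |MOD| (1 >= 1) and OP1 / MOD > 0, so the adjustment is +1 and
   3 + 1 == 4, i.e. 3.5 rounded away from zero.  */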
3680 /* Return the difference between the rounded and the truncated result
3681 of an unsigned division by OP1 with remainder MOD. Halfway cases
3682 are rounded away from zero, rather than to the nearest even
3683 number. */
3684 static rtx
3685 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3687 /* (mod >= op1 - mod ? 1 : 0) */
3688 return gen_rtx_IF_THEN_ELSE
3689 (mode, gen_rtx_GE (BImode, mod,
3690 gen_rtx_MINUS (mode, op1, mod)),
3691 const1_rtx, const0_rtx);
3694 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3695 any rtl. */
3697 static rtx
3698 convert_debug_memory_address (machine_mode mode, rtx x,
3699 addr_space_t as)
3701 machine_mode xmode = GET_MODE (x);
3703 #ifndef POINTERS_EXTEND_UNSIGNED
3704 gcc_assert (mode == Pmode
3705 || mode == targetm.addr_space.address_mode (as));
3706 gcc_assert (xmode == mode || xmode == VOIDmode);
3707 #else
3708 rtx temp;
3710 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3712 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3713 return x;
3715 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3716 x = simplify_gen_subreg (mode, x, xmode,
3717 subreg_lowpart_offset
3718 (mode, xmode));
3719 else if (POINTERS_EXTEND_UNSIGNED > 0)
3720 x = gen_rtx_ZERO_EXTEND (mode, x);
3721 else if (!POINTERS_EXTEND_UNSIGNED)
3722 x = gen_rtx_SIGN_EXTEND (mode, x);
3723 else
3725 switch (GET_CODE (x))
3727 case SUBREG:
3728 if ((SUBREG_PROMOTED_VAR_P (x)
3729 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3730 || (GET_CODE (SUBREG_REG (x)) == PLUS
3731 && REG_P (XEXP (SUBREG_REG (x), 0))
3732 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3733 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3734 && GET_MODE (SUBREG_REG (x)) == mode)
3735 return SUBREG_REG (x);
3736 break;
3737 case LABEL_REF:
3738 temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
3739 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3740 return temp;
3741 case SYMBOL_REF:
3742 temp = shallow_copy_rtx (x);
3743 PUT_MODE (temp, mode);
3744 return temp;
3745 case CONST:
3746 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3747 if (temp)
3748 temp = gen_rtx_CONST (mode, temp);
3749 return temp;
3750 case PLUS:
3751 case MINUS:
3752 if (CONST_INT_P (XEXP (x, 1)))
3754 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3755 if (temp)
3756 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3758 break;
3759 default:
3760 break;
3762 /* Don't know how to express ptr_extend as an operation in debug info. */
3763 return NULL;
3765 #endif /* POINTERS_EXTEND_UNSIGNED */
3767 return x;
3770 /* Return an RTX equivalent to the value of the parameter DECL. */
3772 static rtx
3773 expand_debug_parm_decl (tree decl)
3775 rtx incoming = DECL_INCOMING_RTL (decl);
3777 if (incoming
3778 && GET_MODE (incoming) != BLKmode
3779 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3780 || (MEM_P (incoming)
3781 && REG_P (XEXP (incoming, 0))
3782 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3784 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3786 #ifdef HAVE_window_save
3787 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3788 If the target machine has an explicit window save instruction, the
3789 actual entry value is the corresponding OUTGOING_REGNO instead. */
3790 if (REG_P (incoming)
3791 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3792 incoming
3793 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3794 OUTGOING_REGNO (REGNO (incoming)), 0);
3795 else if (MEM_P (incoming))
3797 rtx reg = XEXP (incoming, 0);
3798 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3800 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3801 incoming = replace_equiv_address_nv (incoming, reg);
3803 else
3804 incoming = copy_rtx (incoming);
3806 #endif
3808 ENTRY_VALUE_EXP (rtl) = incoming;
3809 return rtl;
3812 if (incoming
3813 && GET_MODE (incoming) != BLKmode
3814 && !TREE_ADDRESSABLE (decl)
3815 && MEM_P (incoming)
3816 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3817 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3818 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3819 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3820 return copy_rtx (incoming);
3822 return NULL_RTX;
3825 /* Return an RTX equivalent to the value of the tree expression EXP. */
3827 static rtx
3828 expand_debug_expr (tree exp)
3830 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3831 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3832 machine_mode inner_mode = VOIDmode;
3833 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3834 addr_space_t as;
3836 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3838 case tcc_expression:
3839 switch (TREE_CODE (exp))
3841 case COND_EXPR:
3842 case DOT_PROD_EXPR:
3843 case SAD_EXPR:
3844 case WIDEN_MULT_PLUS_EXPR:
3845 case WIDEN_MULT_MINUS_EXPR:
3846 case FMA_EXPR:
3847 goto ternary;
3849 case TRUTH_ANDIF_EXPR:
3850 case TRUTH_ORIF_EXPR:
3851 case TRUTH_AND_EXPR:
3852 case TRUTH_OR_EXPR:
3853 case TRUTH_XOR_EXPR:
3854 goto binary;
3856 case TRUTH_NOT_EXPR:
3857 goto unary;
3859 default:
3860 break;
3862 break;
3864 ternary:
3865 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3866 if (!op2)
3867 return NULL_RTX;
3868 /* Fall through. */
3870 binary:
3871 case tcc_binary:
3872 case tcc_comparison:
3873 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3874 if (!op1)
3875 return NULL_RTX;
3876 /* Fall through. */
3878 unary:
3879 case tcc_unary:
3880 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3881 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3882 if (!op0)
3883 return NULL_RTX;
3884 break;
3886 case tcc_type:
3887 case tcc_statement:
3888 gcc_unreachable ();
3890 case tcc_constant:
3891 case tcc_exceptional:
3892 case tcc_declaration:
3893 case tcc_reference:
3894 case tcc_vl_exp:
3895 break;
3898 switch (TREE_CODE (exp))
3900 case STRING_CST:
3901 if (!lookup_constant_def (exp))
3903 if (strlen (TREE_STRING_POINTER (exp)) + 1
3904 != (size_t) TREE_STRING_LENGTH (exp))
3905 return NULL_RTX;
3906 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3907 op0 = gen_rtx_MEM (BLKmode, op0);
3908 set_mem_attributes (op0, exp, 0);
3909 return op0;
3911 /* Fall through... */
3913 case INTEGER_CST:
3914 case REAL_CST:
3915 case FIXED_CST:
3916 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3917 return op0;
3919 case COMPLEX_CST:
3920 gcc_assert (COMPLEX_MODE_P (mode));
3921 op0 = expand_debug_expr (TREE_REALPART (exp));
3922 op1 = expand_debug_expr (TREE_IMAGPART (exp));
3923 return gen_rtx_CONCAT (mode, op0, op1);
3925 case DEBUG_EXPR_DECL:
3926 op0 = DECL_RTL_IF_SET (exp);
3928 if (op0)
3929 return op0;
3931 op0 = gen_rtx_DEBUG_EXPR (mode);
3932 DEBUG_EXPR_TREE_DECL (op0) = exp;
3933 SET_DECL_RTL (exp, op0);
3935 return op0;
3937 case VAR_DECL:
3938 case PARM_DECL:
3939 case FUNCTION_DECL:
3940 case LABEL_DECL:
3941 case CONST_DECL:
3942 case RESULT_DECL:
3943 op0 = DECL_RTL_IF_SET (exp);
3945 /* This decl was probably optimized away. */
3946 if (!op0)
3948 if (TREE_CODE (exp) != VAR_DECL
3949 || DECL_EXTERNAL (exp)
3950 || !TREE_STATIC (exp)
3951 || !DECL_NAME (exp)
3952 || DECL_HARD_REGISTER (exp)
3953 || DECL_IN_CONSTANT_POOL (exp)
3954 || mode == VOIDmode)
3955 return NULL;
3957 op0 = make_decl_rtl_for_debug (exp);
3958 if (!MEM_P (op0)
3959 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
3960 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
3961 return NULL;
3963 else
3964 op0 = copy_rtx (op0);
3966 if (GET_MODE (op0) == BLKmode
3967 /* If op0 is not BLKmode, but the expression's MODE is, adjust_mode
3968 below would ICE. While it is likely a FE bug,
3969 try to be robust here. See PR43166. */
3970 || mode == BLKmode
3971 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
3973 gcc_assert (MEM_P (op0));
3974 op0 = adjust_address_nv (op0, mode, 0);
3975 return op0;
3978 /* Fall through. */
3980 adjust_mode:
3981 case PAREN_EXPR:
3982 CASE_CONVERT:
3984 inner_mode = GET_MODE (op0);
3986 if (mode == inner_mode)
3987 return op0;
3989 if (inner_mode == VOIDmode)
3991 if (TREE_CODE (exp) == SSA_NAME)
3992 inner_mode = TYPE_MODE (TREE_TYPE (exp));
3993 else
3994 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3995 if (mode == inner_mode)
3996 return op0;
3999 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4001 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4002 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4003 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4004 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4005 else
4006 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4008 else if (FLOAT_MODE_P (mode))
4010 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4011 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4012 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4013 else
4014 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4016 else if (FLOAT_MODE_P (inner_mode))
4018 if (unsignedp)
4019 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4020 else
4021 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4023 else if (CONSTANT_P (op0)
4024 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
4025 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4026 subreg_lowpart_offset (mode,
4027 inner_mode));
4028 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
4029 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4030 : unsignedp)
4031 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4032 else
4033 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4035 return op0;
4038 case MEM_REF:
4039 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4041 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4042 TREE_OPERAND (exp, 0),
4043 TREE_OPERAND (exp, 1));
4044 if (newexp)
4045 return expand_debug_expr (newexp);
4047 /* FALLTHROUGH */
4048 case INDIRECT_REF:
4049 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4050 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4051 if (!op0)
4052 return NULL;
4054 if (TREE_CODE (exp) == MEM_REF)
4056 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4057 || (GET_CODE (op0) == PLUS
4058 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4059 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4060 Instead just use get_inner_reference. */
4061 goto component_ref;
4063 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4064 if (!op1 || !CONST_INT_P (op1))
4065 return NULL;
4067 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4070 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4072 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4073 op0, as);
4074 if (op0 == NULL_RTX)
4075 return NULL;
4077 op0 = gen_rtx_MEM (mode, op0);
4078 set_mem_attributes (op0, exp, 0);
4079 if (TREE_CODE (exp) == MEM_REF
4080 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4081 set_mem_expr (op0, NULL_TREE);
4082 set_mem_addr_space (op0, as);
4084 return op0;
4086 case TARGET_MEM_REF:
4087 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4088 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4089 return NULL;
4091 op0 = expand_debug_expr
4092 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4093 if (!op0)
4094 return NULL;
4096 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4097 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4098 op0, as);
4099 if (op0 == NULL_RTX)
4100 return NULL;
4102 op0 = gen_rtx_MEM (mode, op0);
4104 set_mem_attributes (op0, exp, 0);
4105 set_mem_addr_space (op0, as);
4107 return op0;
4109 component_ref:
4110 case ARRAY_REF:
4111 case ARRAY_RANGE_REF:
4112 case COMPONENT_REF:
4113 case BIT_FIELD_REF:
4114 case REALPART_EXPR:
4115 case IMAGPART_EXPR:
4116 case VIEW_CONVERT_EXPR:
4118 machine_mode mode1;
4119 HOST_WIDE_INT bitsize, bitpos;
4120 tree offset;
4121 int volatilep = 0;
4122 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4123 &mode1, &unsignedp, &volatilep, false);
4124 rtx orig_op0;
4126 if (bitsize == 0)
4127 return NULL;
4129 orig_op0 = op0 = expand_debug_expr (tem);
4131 if (!op0)
4132 return NULL;
4134 if (offset)
4136 machine_mode addrmode, offmode;
4138 if (!MEM_P (op0))
4139 return NULL;
4141 op0 = XEXP (op0, 0);
4142 addrmode = GET_MODE (op0);
4143 if (addrmode == VOIDmode)
4144 addrmode = Pmode;
4146 op1 = expand_debug_expr (offset);
4147 if (!op1)
4148 return NULL;
4150 offmode = GET_MODE (op1);
4151 if (offmode == VOIDmode)
4152 offmode = TYPE_MODE (TREE_TYPE (offset));
4154 if (addrmode != offmode)
4155 op1 = simplify_gen_subreg (addrmode, op1, offmode,
4156 subreg_lowpart_offset (addrmode,
4157 offmode));
4159 /* Don't use offset_address here: we don't need a
4160 recognizable address, and we don't want to generate
4161 code. */
4162 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4163 op0, op1));
4166 if (MEM_P (op0))
4168 if (mode1 == VOIDmode)
4169 /* Bitfield. */
4170 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4171 if (bitpos >= BITS_PER_UNIT)
4173 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4174 bitpos %= BITS_PER_UNIT;
4176 else if (bitpos < 0)
4178 HOST_WIDE_INT units
4179 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4180 op0 = adjust_address_nv (op0, mode1, units);
4181 bitpos += units * BITS_PER_UNIT;
4183 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4184 op0 = adjust_address_nv (op0, mode, 0);
4185 else if (GET_MODE (op0) != mode1)
4186 op0 = adjust_address_nv (op0, mode1, 0);
4187 else
4188 op0 = copy_rtx (op0);
4189 if (op0 == orig_op0)
4190 op0 = shallow_copy_rtx (op0);
4191 set_mem_attributes (op0, exp, 0);
4194 if (bitpos == 0 && mode == GET_MODE (op0))
4195 return op0;
4197 if (bitpos < 0)
4198 return NULL;
4200 if (GET_MODE (op0) == BLKmode)
4201 return NULL;
4203 if ((bitpos % BITS_PER_UNIT) == 0
4204 && bitsize == GET_MODE_BITSIZE (mode1))
4206 machine_mode opmode = GET_MODE (op0);
4208 if (opmode == VOIDmode)
4209 opmode = TYPE_MODE (TREE_TYPE (tem));
4211 /* This condition may hold if we're expanding the address
4212 right past the end of an array that turned out not to
4213 be addressable (i.e., the address was only computed in
4214 debug stmts). The gen_subreg below would rightfully
4215 crash, and the address doesn't really exist, so just
4216 drop it. */
4217 if (bitpos >= GET_MODE_BITSIZE (opmode))
4218 return NULL;
4220 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4221 return simplify_gen_subreg (mode, op0, opmode,
4222 bitpos / BITS_PER_UNIT);
4225 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4226 && TYPE_UNSIGNED (TREE_TYPE (exp))
4227 ? SIGN_EXTRACT
4228 : ZERO_EXTRACT, mode,
4229 GET_MODE (op0) != VOIDmode
4230 ? GET_MODE (op0)
4231 : TYPE_MODE (TREE_TYPE (tem)),
4232 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4235 case ABS_EXPR:
4236 return simplify_gen_unary (ABS, mode, op0, mode);
4238 case NEGATE_EXPR:
4239 return simplify_gen_unary (NEG, mode, op0, mode);
4241 case BIT_NOT_EXPR:
4242 return simplify_gen_unary (NOT, mode, op0, mode);
4244 case FLOAT_EXPR:
4245 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4246 0)))
4247 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4248 inner_mode);
4250 case FIX_TRUNC_EXPR:
4251 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4252 inner_mode);
4254 case POINTER_PLUS_EXPR:
4255 /* For the rare target where pointers are not the same size as
4256 size_t, we need to check for mismatched modes and correct
4257 the addend. */
4258 if (op0 && op1
4259 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4260 && GET_MODE (op0) != GET_MODE (op1))
4262 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4263 /* If OP0 is a partial mode, then we must truncate, even if it has
4264 the same bitsize as OP1, because GCC's representation of partial modes
4265 is opaque. */
4266 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4267 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4268 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4269 GET_MODE (op1));
4270 else
4271 /* We always sign-extend, regardless of the signedness of
4272 the operand, because the operand is always unsigned
4273 here even if the original C expression is signed. */
4274 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4275 GET_MODE (op1));
4277 /* Fall through. */
4278 case PLUS_EXPR:
4279 return simplify_gen_binary (PLUS, mode, op0, op1);
4281 case MINUS_EXPR:
4282 return simplify_gen_binary (MINUS, mode, op0, op1);
4284 case MULT_EXPR:
4285 return simplify_gen_binary (MULT, mode, op0, op1);
4287 case RDIV_EXPR:
4288 case TRUNC_DIV_EXPR:
4289 case EXACT_DIV_EXPR:
4290 if (unsignedp)
4291 return simplify_gen_binary (UDIV, mode, op0, op1);
4292 else
4293 return simplify_gen_binary (DIV, mode, op0, op1);
4295 case TRUNC_MOD_EXPR:
4296 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4298 case FLOOR_DIV_EXPR:
4299 if (unsignedp)
4300 return simplify_gen_binary (UDIV, mode, op0, op1);
4301 else
4303 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4304 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4305 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4306 return simplify_gen_binary (PLUS, mode, div, adj);
4309 case FLOOR_MOD_EXPR:
4310 if (unsignedp)
4311 return simplify_gen_binary (UMOD, mode, op0, op1);
4312 else
4314 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4315 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4316 adj = simplify_gen_unary (NEG, mode,
4317 simplify_gen_binary (MULT, mode, adj, op1),
4318 mode);
4319 return simplify_gen_binary (PLUS, mode, mod, adj);
4322 case CEIL_DIV_EXPR:
4323 if (unsignedp)
4325 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4326 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4327 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4328 return simplify_gen_binary (PLUS, mode, div, adj);
4330 else
4332 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4333 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4334 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4335 return simplify_gen_binary (PLUS, mode, div, adj);
4338 case CEIL_MOD_EXPR:
4339 if (unsignedp)
4341 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4342 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4343 adj = simplify_gen_unary (NEG, mode,
4344 simplify_gen_binary (MULT, mode, adj, op1),
4345 mode);
4346 return simplify_gen_binary (PLUS, mode, mod, adj);
4348 else
4350 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4351 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4352 adj = simplify_gen_unary (NEG, mode,
4353 simplify_gen_binary (MULT, mode, adj, op1),
4354 mode);
4355 return simplify_gen_binary (PLUS, mode, mod, adj);
4358 case ROUND_DIV_EXPR:
4359 if (unsignedp)
4361 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4362 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4363 rtx adj = round_udiv_adjust (mode, mod, op1);
4364 return simplify_gen_binary (PLUS, mode, div, adj);
4366 else
4368 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4369 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4370 rtx adj = round_sdiv_adjust (mode, mod, op1);
4371 return simplify_gen_binary (PLUS, mode, div, adj);
4374 case ROUND_MOD_EXPR:
4375 if (unsignedp)
4377 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4378 rtx adj = round_udiv_adjust (mode, mod, op1);
4379 adj = simplify_gen_unary (NEG, mode,
4380 simplify_gen_binary (MULT, mode, adj, op1),
4381 mode);
4382 return simplify_gen_binary (PLUS, mode, mod, adj);
4384 else
4386 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4387 rtx adj = round_sdiv_adjust (mode, mod, op1);
4388 adj = simplify_gen_unary (NEG, mode,
4389 simplify_gen_binary (MULT, mode, adj, op1),
4390 mode);
4391 return simplify_gen_binary (PLUS, mode, mod, adj);
4394 case LSHIFT_EXPR:
4395 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4397 case RSHIFT_EXPR:
4398 if (unsignedp)
4399 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4400 else
4401 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4403 case LROTATE_EXPR:
4404 return simplify_gen_binary (ROTATE, mode, op0, op1);
4406 case RROTATE_EXPR:
4407 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4409 case MIN_EXPR:
4410 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4412 case MAX_EXPR:
4413 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4415 case BIT_AND_EXPR:
4416 case TRUTH_AND_EXPR:
4417 return simplify_gen_binary (AND, mode, op0, op1);
4419 case BIT_IOR_EXPR:
4420 case TRUTH_OR_EXPR:
4421 return simplify_gen_binary (IOR, mode, op0, op1);
4423 case BIT_XOR_EXPR:
4424 case TRUTH_XOR_EXPR:
4425 return simplify_gen_binary (XOR, mode, op0, op1);
4427 case TRUTH_ANDIF_EXPR:
4428 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4430 case TRUTH_ORIF_EXPR:
4431 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4433 case TRUTH_NOT_EXPR:
4434 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4436 case LT_EXPR:
4437 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4438 op0, op1);
4440 case LE_EXPR:
4441 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4442 op0, op1);
4444 case GT_EXPR:
4445 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4446 op0, op1);
4448 case GE_EXPR:
4449 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4450 op0, op1);
4452 case EQ_EXPR:
4453 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4455 case NE_EXPR:
4456 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4458 case UNORDERED_EXPR:
4459 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4461 case ORDERED_EXPR:
4462 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4464 case UNLT_EXPR:
4465 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4467 case UNLE_EXPR:
4468 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4470 case UNGT_EXPR:
4471 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4473 case UNGE_EXPR:
4474 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4476 case UNEQ_EXPR:
4477 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4479 case LTGT_EXPR:
4480 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4482 case COND_EXPR:
4483 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4485 case COMPLEX_EXPR:
4486 gcc_assert (COMPLEX_MODE_P (mode));
4487 if (GET_MODE (op0) == VOIDmode)
4488 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4489 if (GET_MODE (op1) == VOIDmode)
4490 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4491 return gen_rtx_CONCAT (mode, op0, op1);
4493 case CONJ_EXPR:
4494 if (GET_CODE (op0) == CONCAT)
4495 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4496 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4497 XEXP (op0, 1),
4498 GET_MODE_INNER (mode)));
4499 else
4501 machine_mode imode = GET_MODE_INNER (mode);
4502 rtx re, im;
4504 if (MEM_P (op0))
4506 re = adjust_address_nv (op0, imode, 0);
4507 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4509 else
4511 machine_mode ifmode = int_mode_for_mode (mode);
4512 machine_mode ihmode = int_mode_for_mode (imode);
4513 rtx halfsize;
4514 if (ifmode == BLKmode || ihmode == BLKmode)
4515 return NULL;
4516 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4517 re = op0;
4518 if (mode != ifmode)
4519 re = gen_rtx_SUBREG (ifmode, re, 0);
4520 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4521 if (imode != ihmode)
4522 re = gen_rtx_SUBREG (imode, re, 0);
4523 im = copy_rtx (op0);
4524 if (mode != ifmode)
4525 im = gen_rtx_SUBREG (ifmode, im, 0);
4526 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4527 if (imode != ihmode)
4528 im = gen_rtx_SUBREG (imode, im, 0);
4530 im = gen_rtx_NEG (imode, im);
4531 return gen_rtx_CONCAT (mode, re, im);
4534 case ADDR_EXPR:
4535 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4536 if (!op0 || !MEM_P (op0))
4538 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4539 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4540 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4541 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4542 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4543 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4545 if (handled_component_p (TREE_OPERAND (exp, 0)))
4547 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4548 tree decl
4549 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4550 &bitoffset, &bitsize, &maxsize);
4551 if ((TREE_CODE (decl) == VAR_DECL
4552 || TREE_CODE (decl) == PARM_DECL
4553 || TREE_CODE (decl) == RESULT_DECL)
4554 && (!TREE_ADDRESSABLE (decl)
4555 || target_for_debug_bind (decl))
4556 && (bitoffset % BITS_PER_UNIT) == 0
4557 && bitsize > 0
4558 && bitsize == maxsize)
4560 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4561 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4565 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4566 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4567 == ADDR_EXPR)
4569 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4570 0));
4571 if (op0 != NULL
4572 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4573 || (GET_CODE (op0) == PLUS
4574 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4575 && CONST_INT_P (XEXP (op0, 1)))))
4577 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4578 1));
4579 if (!op1 || !CONST_INT_P (op1))
4580 return NULL;
4582 return plus_constant (mode, op0, INTVAL (op1));
4586 return NULL;
4589 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4590 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4592 return op0;
4594 case VECTOR_CST:
4596 unsigned i;
4598 op0 = gen_rtx_CONCATN
4599 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4601 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4603 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4604 if (!op1)
4605 return NULL;
4606 XVECEXP (op0, 0, i) = op1;
4609 return op0;
4612 case CONSTRUCTOR:
4613 if (TREE_CLOBBER_P (exp))
4614 return NULL;
4615 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4617 unsigned i;
4618 tree val;
4620 op0 = gen_rtx_CONCATN
4621 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4623 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4625 op1 = expand_debug_expr (val);
4626 if (!op1)
4627 return NULL;
4628 XVECEXP (op0, 0, i) = op1;
4631 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4633 op1 = expand_debug_expr
4634 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4636 if (!op1)
4637 return NULL;
4639 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4640 XVECEXP (op0, 0, i) = op1;
4643 return op0;
4645 else
4646 goto flag_unsupported;
4648 case CALL_EXPR:
4649 /* ??? Maybe handle some builtins? */
4650 return NULL;
4652 case SSA_NAME:
4654 gimple g = get_gimple_for_ssa_name (exp);
4655 if (g)
4657 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
4658 if (!op0)
4659 return NULL;
4661 else
4663 int part = var_to_partition (SA.map, exp);
4665 if (part == NO_PARTITION)
4667 /* If this is a reference to the incoming value of a parameter
4668 that is never used in the code, or whose incoming value is
4669 never used in the code, use the PARM_DECL's
4670 DECL_RTL if set. */
4671 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4672 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4674 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4675 if (op0)
4676 goto adjust_mode;
4677 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4678 if (op0)
4679 goto adjust_mode;
4681 return NULL;
4684 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4686 op0 = copy_rtx (SA.partition_to_pseudo[part]);
4688 goto adjust_mode;
4691 case ERROR_MARK:
4692 return NULL;
4694 /* Vector stuff. For most of these tree codes there is no corresponding rtl code. */
4695 case REALIGN_LOAD_EXPR:
4696 case REDUC_MAX_EXPR:
4697 case REDUC_MIN_EXPR:
4698 case REDUC_PLUS_EXPR:
4699 case VEC_COND_EXPR:
4700 case VEC_PACK_FIX_TRUNC_EXPR:
4701 case VEC_PACK_SAT_EXPR:
4702 case VEC_PACK_TRUNC_EXPR:
4703 case VEC_UNPACK_FLOAT_HI_EXPR:
4704 case VEC_UNPACK_FLOAT_LO_EXPR:
4705 case VEC_UNPACK_HI_EXPR:
4706 case VEC_UNPACK_LO_EXPR:
4707 case VEC_WIDEN_MULT_HI_EXPR:
4708 case VEC_WIDEN_MULT_LO_EXPR:
4709 case VEC_WIDEN_MULT_EVEN_EXPR:
4710 case VEC_WIDEN_MULT_ODD_EXPR:
4711 case VEC_WIDEN_LSHIFT_HI_EXPR:
4712 case VEC_WIDEN_LSHIFT_LO_EXPR:
4713 case VEC_PERM_EXPR:
4714 return NULL;
4716 /* Misc codes. */
4717 case ADDR_SPACE_CONVERT_EXPR:
4718 case FIXED_CONVERT_EXPR:
4719 case OBJ_TYPE_REF:
4720 case WITH_SIZE_EXPR:
4721 return NULL;
4723 case DOT_PROD_EXPR:
4724 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4725 && SCALAR_INT_MODE_P (mode))
4727 op0
4728 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4729 0)))
4730 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4731 inner_mode);
4732 op1
4733 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4734 1)))
4735 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4736 inner_mode);
4737 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4738 return simplify_gen_binary (PLUS, mode, op0, op2);
4740 return NULL;
4742 case WIDEN_MULT_EXPR:
4743 case WIDEN_MULT_PLUS_EXPR:
4744 case WIDEN_MULT_MINUS_EXPR:
4745 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4746 && SCALAR_INT_MODE_P (mode))
4748 inner_mode = GET_MODE (op0);
4749 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4750 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4751 else
4752 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4753 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4754 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4755 else
4756 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4757 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4758 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4759 return op0;
4760 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4761 return simplify_gen_binary (PLUS, mode, op0, op2);
4762 else
4763 return simplify_gen_binary (MINUS, mode, op2, op0);
4765 return NULL;
4767 case MULT_HIGHPART_EXPR:
4768 /* ??? Similar to the above. */
4769 return NULL;
4771 case WIDEN_SUM_EXPR:
4772 case WIDEN_LSHIFT_EXPR:
4773 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4774 && SCALAR_INT_MODE_P (mode))
4776 op0
4777 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4778 0)))
4779 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4780 inner_mode);
4781 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4782 ? ASHIFT : PLUS, mode, op0, op1);
4784 return NULL;
4786 case FMA_EXPR:
4787 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4789 default:
4790 flag_unsupported:
4791 #ifdef ENABLE_CHECKING
4792 debug_tree (exp);
4793 gcc_unreachable ();
4794 #else
4795 return NULL;
4796 #endif
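/* Added illustrative sketch, not part of the original file and with a
   hypothetical name: the FLOOR_DIV_EXPR case above expands via the
   same identity as this plain-C floor division.  The truncating
   quotient is corrected by -1 when a nonzero remainder has the
   opposite sign from the divisor, which is the condition
   floor_sdiv_adjust encodes in rtl.  */
static int
floor_div_example (int a, int b)
{
  int div = a / b;		/* C division truncates toward zero.  */
  int mod = a % b;
  int adj = (mod != 0 && (mod < 0) != (b < 0)) ? -1 : 0;
  return div + adj;		/* E.g. -7/2: -3 + -1 == -4.  */
}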
4800 /* Return an RTX equivalent to the source bind value of the tree expression
4801 EXP. */
4803 static rtx
4804 expand_debug_source_expr (tree exp)
4806 rtx op0 = NULL_RTX;
4807 machine_mode mode = VOIDmode, inner_mode;
4809 switch (TREE_CODE (exp))
4811 case PARM_DECL:
4813 mode = DECL_MODE (exp);
4814 op0 = expand_debug_parm_decl (exp);
4815 if (op0)
4816 break;
4817 /* See if this isn't an argument that has been completely
4818 optimized out. */
4819 if (!DECL_RTL_SET_P (exp)
4820 && !DECL_INCOMING_RTL (exp)
4821 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4823 tree aexp = DECL_ORIGIN (exp);
4824 if (DECL_CONTEXT (aexp)
4825 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4827 vec<tree, va_gc> **debug_args;
4828 unsigned int ix;
4829 tree ddecl;
4830 debug_args = decl_debug_args_lookup (current_function_decl);
4831 if (debug_args != NULL)
4833 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
4834 ix += 2)
4835 if (ddecl == aexp)
4836 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4840 break;
4842 default:
4843 break;
4846 if (op0 == NULL_RTX)
4847 return NULL_RTX;
4849 inner_mode = GET_MODE (op0);
4850 if (mode == inner_mode)
4851 return op0;
4853 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4855 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4856 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4857 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4858 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4859 else
4860 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4862 else if (FLOAT_MODE_P (mode))
4863 gcc_unreachable ();
4864 else if (FLOAT_MODE_P (inner_mode))
4866 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4867 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4868 else
4869 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4871 else if (CONSTANT_P (op0)
4872 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4873 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4874 subreg_lowpart_offset (mode, inner_mode));
4875 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4876 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4877 else
4878 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4880 return op0;
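/* Added illustrative sketch, not part of the original file: the
   conversion ladder above mirrors ordinary C conversions; each branch
   corresponds to a cast like one of these.  */
static double fext_example (float f) { return f; }		/* FLOAT_EXTEND */
static float ftrunc_example (double d) { return (float) d; }	/* FLOAT_TRUNCATE */
static int fix_example (float f) { return (int) f; }		/* FIX */
static long long sext_example (int i) { return i; }		/* SIGN_EXTEND */
static unsigned long long zext_example (unsigned int u) { return u; } /* ZERO_EXTEND */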
4883 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
4884 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4885 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
4887 static void
4888 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
4890 rtx exp = *exp_p;
4892 if (exp == NULL_RTX)
4893 return;
4895 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4896 return;
4898 if (depth == 4)
4900 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4901 rtx dval = make_debug_expr_from_rtl (exp);
4903 /* Emit a debug bind insn before INSN. */
4904 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4905 DEBUG_EXPR_TREE_DECL (dval), exp,
4906 VAR_INIT_STATUS_INITIALIZED);
4908 emit_debug_insn_before (bind, insn);
4909 *exp_p = dval;
4910 return;
4913 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
4914 int i, j;
4915 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
4916 switch (*format_ptr++)
4918 case 'e':
4919 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
4920 break;
4922 case 'E':
4923 case 'V':
4924 for (j = 0; j < XVECLEN (exp, i); j++)
4925 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
4926 break;
4928 default:
4929 break;
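/* Added illustrative sketch with hypothetical types, not part of the
   original file: the same depth-capping idea on a toy expression tree.
   Once the walk reaches the limit, the whole subtree is hidden behind
   a temporary supplied by the caller, just as the function above hides
   deep rtl behind a DEBUG_EXPR.  */
struct toy_expr { struct toy_expr *kid[2]; int n_kids; };

static void
cap_depth_example (struct toy_expr **p, int depth,
		   struct toy_expr *(*make_temp) (struct toy_expr *))
{
  int i;
  if (*p == NULL)
    return;
  if (depth == 4)
    {
      *p = make_temp (*p);	/* Split the deep subtree out.  */
      return;
    }
  for (i = 0; i < (*p)->n_kids; i++)
    cap_depth_example (&(*p)->kid[i], depth + 1, make_temp);
}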
4933 /* Expand the _LOCs in debug insns. We run this after expanding all
4934 regular insns, so that any variables referenced in the function
4935 will have their DECL_RTLs set. */
4937 static void
4938 expand_debug_locations (void)
4940 rtx_insn *insn;
4941 rtx_insn *last = get_last_insn ();
4942 int save_strict_alias = flag_strict_aliasing;
4944 /* New alias sets while setting up memory attributes cause
4945 -fcompare-debug failures, even though they don't bring about any
4946 codegen changes. */
4947 flag_strict_aliasing = 0;
4949 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4950 if (DEBUG_INSN_P (insn))
4952 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
4953 rtx val;
4954 rtx_insn *prev_insn, *insn2;
4955 machine_mode mode;
4957 if (value == NULL_TREE)
4958 val = NULL_RTX;
4959 else
4961 if (INSN_VAR_LOCATION_STATUS (insn)
4962 == VAR_INIT_STATUS_UNINITIALIZED)
4963 val = expand_debug_source_expr (value);
4964 else
4965 val = expand_debug_expr (value);
4966 gcc_assert (last == get_last_insn ());
4969 if (!val)
4970 val = gen_rtx_UNKNOWN_VAR_LOC ();
4971 else
4973 mode = GET_MODE (INSN_VAR_LOCATION (insn));
4975 gcc_assert (mode == GET_MODE (val)
4976 || (GET_MODE (val) == VOIDmode
4977 && (CONST_SCALAR_INT_P (val)
4978 || GET_CODE (val) == CONST_FIXED
4979 || GET_CODE (val) == LABEL_REF)));
4982 INSN_VAR_LOCATION_LOC (insn) = val;
4983 prev_insn = PREV_INSN (insn);
4984 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
4985 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
4988 flag_strict_aliasing = save_strict_alias;
4991 /* Swap the operands of commutative operations so that the more
4992 expensive operand is expanded first. */
4994 static void
4995 reorder_operands (basic_block bb)
4997 unsigned int *lattice; /* Hold cost of each statement. */
4998 unsigned int i = 0, n = 0;
4999 gimple_stmt_iterator gsi;
5000 gimple_seq stmts;
5001 gimple stmt;
5002 bool swap;
5003 tree op0, op1;
5004 ssa_op_iter iter;
5005 use_operand_p use_p;
5006 gimple def0, def1;
5008 /* Compute cost of each statement using estimate_num_insns. */
5009 stmts = bb_seq (bb);
5010 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5012 stmt = gsi_stmt (gsi);
5013 gimple_set_uid (stmt, n++);
5015 lattice = XNEWVEC (unsigned int, n);
5016 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5018 unsigned cost;
5019 stmt = gsi_stmt (gsi);
5020 cost = estimate_num_insns (stmt, &eni_size_weights);
5021 lattice[i] = cost;
5022 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5024 tree use = USE_FROM_PTR (use_p);
5025 gimple def_stmt;
5026 if (TREE_CODE (use) != SSA_NAME)
5027 continue;
5028 def_stmt = get_gimple_for_ssa_name (use);
5029 if (!def_stmt)
5030 continue;
5031 lattice[i] += lattice[gimple_uid (def_stmt)];
5033 i++;
5034 if (!is_gimple_assign (stmt)
5035 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5036 continue;
5037 op0 = gimple_op (stmt, 1);
5038 op1 = gimple_op (stmt, 2);
5039 if (TREE_CODE (op0) != SSA_NAME
5040 || TREE_CODE (op1) != SSA_NAME)
5041 continue;
5042 /* Swap operands if the second one is more expensive. */
5043 def0 = get_gimple_for_ssa_name (op0);
5044 if (!def0)
5045 continue;
5046 def1 = get_gimple_for_ssa_name (op1);
5047 if (!def1)
5048 continue;
5049 swap = false;
5050 if (lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5051 swap = true;
5052 if (swap)
5054 if (dump_file && (dump_flags & TDF_DETAILS))
5056 fprintf (dump_file, "Swap operands in stmt:\n");
5057 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5058 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5059 lattice[gimple_uid (def0)],
5060 lattice[gimple_uid (def1)]);
5062 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5063 gimple_assign_rhs2_ptr (stmt));
5066 XDELETE (lattice);
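/* Added illustrative sketch with hypothetical data, not part of the
   original file: the lattice above gives each statement its own cost
   plus the cost of every TERed definition feeding it.  With uids in
   statement order and up to two uses per statement (-1 meaning no
   TERed def), the accumulation looks like this; operands are then
   swapped when the lattice value of the second def exceeds that of
   the first.  */
static void
accumulate_costs_example (unsigned int *lattice, const unsigned int *own_cost,
			  const int (*use_uid)[2], unsigned int n)
{
  unsigned int i;
  int k;
  for (i = 0; i < n; i++)
    {
      lattice[i] = own_cost[i];
      for (k = 0; k < 2; k++)
	if (use_uid[i][k] >= 0)
	  lattice[i] += lattice[use_uid[i][k]];
    }
}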
5069 /* Expand basic block BB from GIMPLE trees to RTL. */
5071 static basic_block
5072 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5074 gimple_stmt_iterator gsi;
5075 gimple_seq stmts;
5076 gimple stmt = NULL;
5077 rtx_note *note;
5078 rtx_insn *last;
5079 edge e;
5080 edge_iterator ei;
5082 if (dump_file)
5083 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5084 bb->index);
5086 /* Note that since we are now transitioning from GIMPLE to RTL, we
5087 cannot use the gsi_*_bb() routines because they expect the basic
5088 block to be in GIMPLE, instead of RTL. Therefore, we need to
5089 access the BB sequence directly. */
5090 if (optimize)
5091 reorder_operands (bb);
5092 stmts = bb_seq (bb);
5093 bb->il.gimple.seq = NULL;
5094 bb->il.gimple.phi_nodes = NULL;
5095 rtl_profile_for_bb (bb);
5096 init_rtl_bb_info (bb);
5097 bb->flags |= BB_RTL;
5099 /* Remove the RETURN_EXPR if we may fall through to the exit
5100 instead. */
5101 gsi = gsi_last (stmts);
5102 if (!gsi_end_p (gsi)
5103 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5105 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5107 gcc_assert (single_succ_p (bb));
5108 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5110 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5111 && !gimple_return_retval (ret_stmt))
5113 gsi_remove (&gsi, false);
5114 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5118 gsi = gsi_start (stmts);
5119 if (!gsi_end_p (gsi))
5121 stmt = gsi_stmt (gsi);
5122 if (gimple_code (stmt) != GIMPLE_LABEL)
5123 stmt = NULL;
5126 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5128 if (stmt || elt)
5130 last = get_last_insn ();
5132 if (stmt)
5134 expand_gimple_stmt (stmt);
5135 gsi_next (&gsi);
5138 if (elt)
5139 emit_label (*elt);
5141 /* Java emits line number notes at the top of labels.
5142 ??? Make this go away once line number notes are obsoleted. */
5143 BB_HEAD (bb) = NEXT_INSN (last);
5144 if (NOTE_P (BB_HEAD (bb)))
5145 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5146 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5148 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5150 else
5151 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5153 NOTE_BASIC_BLOCK (note) = bb;
5155 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5157 basic_block new_bb;
5159 stmt = gsi_stmt (gsi);
5161 /* If this statement is a non-debug one, and we generate debug
5162 insns, then this one might be the last real use of a TERed
5163 SSA_NAME, but where there are still some debug uses further
5164 down. Expanding the current SSA name in such further debug
5165 uses by their RHS might lead to wrong debug info, as coalescing
5166 might make the operands of such RHS be placed into the same
5167 pseudo as something else. Like so:
5168 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5169 use(a_1);
5170 a_2 = ...
5171 #DEBUG ... => a_1
5172 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5173 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5174 the write to a_2 would actually have clobbered the place which
5175 formerly held a_0.
5177 So, instead of that, we recognize the situation, and generate
5178 debug temporaries at the last real use of TERed SSA names:
5179 a_1 = a_0 + 1;
5180 #DEBUG #D1 => a_1
5181 use(a_1);
5182 a_2 = ...
5183 #DEBUG ... => #D1
5185 if (MAY_HAVE_DEBUG_INSNS
5186 && SA.values
5187 && !is_gimple_debug (stmt))
5189 ssa_op_iter iter;
5190 tree op;
5191 gimple def;
5193 location_t sloc = curr_insn_location ();
5195 /* Look for SSA names that have their last use here (TERed
5196 names always have only one real use). */
5197 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5198 if ((def = get_gimple_for_ssa_name (op)))
5200 imm_use_iterator imm_iter;
5201 use_operand_p use_p;
5202 bool have_debug_uses = false;
5204 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5206 if (gimple_debug_bind_p (USE_STMT (use_p)))
5208 have_debug_uses = true;
5209 break;
5213 if (have_debug_uses)
5215 /* OP is a TERed SSA name, with DEF it's defining
5216 statement, and where OP is used in further debug
5217 instructions. Generate a debug temporary, and
5218 replace all uses of OP in debug insns with that
5219 temporary. */
5220 gimple debugstmt;
5221 tree value = gimple_assign_rhs_to_tree (def);
5222 tree vexpr = make_node (DEBUG_EXPR_DECL);
5223 rtx val;
5224 machine_mode mode;
5226 set_curr_insn_location (gimple_location (def));
5228 DECL_ARTIFICIAL (vexpr) = 1;
5229 TREE_TYPE (vexpr) = TREE_TYPE (value);
5230 if (DECL_P (value))
5231 mode = DECL_MODE (value);
5232 else
5233 mode = TYPE_MODE (TREE_TYPE (value));
5234 DECL_MODE (vexpr) = mode;
5236 val = gen_rtx_VAR_LOCATION
5237 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5239 emit_debug_insn (val);
5241 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5243 if (!gimple_debug_bind_p (debugstmt))
5244 continue;
5246 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5247 SET_USE (use_p, vexpr);
5249 update_stmt (debugstmt);
5253 set_curr_insn_location (sloc);
5256 currently_expanding_gimple_stmt = stmt;
5258 /* Expand this statement, then evaluate the resulting RTL and
5259 fixup the CFG accordingly. */
5260 if (gimple_code (stmt) == GIMPLE_COND)
5262 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5263 if (new_bb)
5264 return new_bb;
5266 else if (gimple_debug_bind_p (stmt))
5268 location_t sloc = curr_insn_location ();
5269 gimple_stmt_iterator nsi = gsi;
5271 for (;;)
5273 tree var = gimple_debug_bind_get_var (stmt);
5274 tree value;
5275 rtx val;
5276 machine_mode mode;
5278 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5279 && TREE_CODE (var) != LABEL_DECL
5280 && !target_for_debug_bind (var))
5281 goto delink_debug_stmt;
5283 if (gimple_debug_bind_has_value_p (stmt))
5284 value = gimple_debug_bind_get_value (stmt);
5285 else
5286 value = NULL_TREE;
5288 last = get_last_insn ();
5290 set_curr_insn_location (gimple_location (stmt));
5292 if (DECL_P (var))
5293 mode = DECL_MODE (var);
5294 else
5295 mode = TYPE_MODE (TREE_TYPE (var));
5297 val = gen_rtx_VAR_LOCATION
5298 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5300 emit_debug_insn (val);
5302 if (dump_file && (dump_flags & TDF_DETAILS))
5304 /* We can't dump the insn with a TREE where an RTX
5305 is expected. */
5306 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5307 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5308 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5311 delink_debug_stmt:
5312 /* In order not to generate too many debug temporaries,
5313 we delink all uses of debug statements we already expanded.
5314 Therefore debug statements between definition and real
5315 use of TERed SSA names will continue to use the SSA name,
5316 and not be replaced with debug temps. */
5317 delink_stmt_imm_use (stmt);
5319 gsi = nsi;
5320 gsi_next (&nsi);
5321 if (gsi_end_p (nsi))
5322 break;
5323 stmt = gsi_stmt (nsi);
5324 if (!gimple_debug_bind_p (stmt))
5325 break;
5328 set_curr_insn_location (sloc);
5330 else if (gimple_debug_source_bind_p (stmt))
5332 location_t sloc = curr_insn_location ();
5333 tree var = gimple_debug_source_bind_get_var (stmt);
5334 tree value = gimple_debug_source_bind_get_value (stmt);
5335 rtx val;
5336 machine_mode mode;
5338 last = get_last_insn ();
5340 set_curr_insn_location (gimple_location (stmt));
5342 mode = DECL_MODE (var);
5344 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5345 VAR_INIT_STATUS_UNINITIALIZED);
5347 emit_debug_insn (val);
5349 if (dump_file && (dump_flags & TDF_DETAILS))
5351 /* We can't dump the insn with a TREE where an RTX
5352 is expected. */
5353 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5354 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5355 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5358 set_curr_insn_location (sloc);
5360 else
5362 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5363 if (call_stmt
5364 && gimple_call_tail_p (call_stmt)
5365 && disable_tail_calls)
5366 gimple_call_set_tail (call_stmt, false);
5368 if (call_stmt && gimple_call_tail_p (call_stmt))
5370 bool can_fallthru;
5371 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5372 if (new_bb)
5374 if (can_fallthru)
5375 bb = new_bb;
5376 else
5377 return new_bb;
5380 else
5382 def_operand_p def_p;
5383 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5385 if (def_p != NULL)
5387 /* Ignore this stmt if it is in the list of
5388 replaceable expressions. */
5389 if (SA.values
5390 && bitmap_bit_p (SA.values,
5391 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5392 continue;
5394 last = expand_gimple_stmt (stmt);
5395 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5400 currently_expanding_gimple_stmt = NULL;
5402 /* Expand implicit goto and convert goto_locus. */
5403 FOR_EACH_EDGE (e, ei, bb->succs)
5405 if (e->goto_locus != UNKNOWN_LOCATION)
5406 set_curr_insn_location (e->goto_locus);
5407 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5409 emit_jump (label_rtx_for_bb (e->dest));
5410 e->flags &= ~EDGE_FALLTHRU;
5414 /* Expanded RTL can create a jump in the last instruction of a block.
5415 This might later be assumed to be a jump to the successor and break
5416 edge insertion. We need to insert a dummy move to prevent this. PR41440. */
5417 if (single_succ_p (bb)
5418 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5419 && (last = get_last_insn ())
5420 && JUMP_P (last))
5422 rtx dummy = gen_reg_rtx (SImode);
5423 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5426 do_pending_stack_adjust ();
5428 /* Find the block tail. The last insn in the block is the insn
5429 before a barrier and/or table jump insn. */
5430 last = get_last_insn ();
5431 if (BARRIER_P (last))
5432 last = PREV_INSN (last);
5433 if (JUMP_TABLE_DATA_P (last))
5434 last = PREV_INSN (PREV_INSN (last));
5435 BB_END (bb) = last;
5437 update_bb_for_insn (bb);
5439 return bb;
5443 /* Create a basic block for initialization code. */
5445 static basic_block
5446 construct_init_block (void)
5448 basic_block init_block, first_block;
5449 edge e = NULL;
5450 int flags;
5452 /* Multiple entry points not supported yet. */
5453 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5454 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5455 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5456 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5457 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5459 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5461 /* When the entry edge points to the first basic block, we don't need a
5462 jump; otherwise we have to jump to the proper target. */
5463 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5465 tree label = gimple_block_label (e->dest);
5467 emit_jump (label_rtx (label));
5468 flags = 0;
5470 else
5471 flags = EDGE_FALLTHRU;
5473 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5474 get_last_insn (),
5475 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5476 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5477 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5478 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5479 if (e)
5481 first_block = e->dest;
5482 redirect_edge_succ (e, init_block);
5483 e = make_edge (init_block, first_block, flags);
5485 else
5486 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5487 e->probability = REG_BR_PROB_BASE;
5488 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5490 update_bb_for_insn (init_block);
5491 return init_block;
5494 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5495 found in the block tree. */
5497 static void
5498 set_block_levels (tree block, int level)
5500 while (block)
5502 BLOCK_NUMBER (block) = level;
5503 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5504 block = BLOCK_CHAIN (block);
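/* Added illustrative sketch with a hypothetical struct, not part of
   the original file: the same first-child/next-sibling walk on a toy
   block tree, numbering each node with its depth.  */
struct toy_block { int number; struct toy_block *subblocks, *chain; };

static void
set_levels_example (struct toy_block *block, int level)
{
  while (block)
    {
      block->number = level;
      set_levels_example (block->subblocks, level + 1);	/* Children.  */
      block = block->chain;				/* Siblings.  */
    }
}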
5508 /* Create a block containing landing pads and similar stuff. */
5510 static void
5511 construct_exit_block (void)
5513 rtx_insn *head = get_last_insn ();
5514 rtx_insn *end;
5515 basic_block exit_block;
5516 edge e, e2;
5517 unsigned ix;
5518 edge_iterator ei;
5519 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5520 rtx_insn *orig_end = BB_END (prev_bb);
5522 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5524 /* Make sure the locus is set to the end of the function, so that
5525 epilogue line numbers and warnings are set properly. */
5526 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5527 input_location = cfun->function_end_locus;
5529 /* Generate rtl for function exit. */
5530 expand_function_end ();
5532 end = get_last_insn ();
5533 if (head == end)
5534 return;
5535 /* While emitting the function end we could move the end of the last
5536 basic block. */
5537 BB_END (prev_bb) = orig_end;
5538 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5539 head = NEXT_INSN (head);
5540 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5541 bb frequency counting will be confused. Any instructions before that
5542 label are emitted for the case where PREV_BB falls through into the
5543 exit block, so append those instructions to prev_bb in that case. */
5544 if (NEXT_INSN (head) != return_label)
5546 while (NEXT_INSN (head) != return_label)
5548 if (!NOTE_P (NEXT_INSN (head)))
5549 BB_END (prev_bb) = NEXT_INSN (head);
5550 head = NEXT_INSN (head);
5553 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5554 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5555 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5556 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5558 ix = 0;
5559 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5561 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5562 if (!(e->flags & EDGE_ABNORMAL))
5563 redirect_edge_succ (e, exit_block);
5564 else
5565 ix++;
5568 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5569 e->probability = REG_BR_PROB_BASE;
5570 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5571 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5572 if (e2 != e)
5574 e->count -= e2->count;
5575 exit_block->count -= e2->count;
5576 exit_block->frequency -= EDGE_FREQUENCY (e2);
5578 if (e->count < 0)
5579 e->count = 0;
5580 if (exit_block->count < 0)
5581 exit_block->count = 0;
5582 if (exit_block->frequency < 0)
5583 exit_block->frequency = 0;
5584 update_bb_for_insn (exit_block);
5587 /* Helper function for discover_nonconstant_array_refs.
5588 Look for ARRAY_REF nodes with non-constant indexes and mark them
5589 addressable. */
5591 static tree
5592 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5593 void *data ATTRIBUTE_UNUSED)
5595 tree t = *tp;
5597 if (IS_TYPE_OR_DECL_P (t))
5598 *walk_subtrees = 0;
5599 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5601 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5602 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5603 && (!TREE_OPERAND (t, 2)
5604 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5605 || (TREE_CODE (t) == COMPONENT_REF
5606 && (!TREE_OPERAND (t,2)
5607 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5608 || TREE_CODE (t) == BIT_FIELD_REF
5609 || TREE_CODE (t) == REALPART_EXPR
5610 || TREE_CODE (t) == IMAGPART_EXPR
5611 || TREE_CODE (t) == VIEW_CONVERT_EXPR
5612 || CONVERT_EXPR_P (t))
5613 t = TREE_OPERAND (t, 0);
5615 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5617 t = get_base_address (t);
5618 if (t && DECL_P (t)
5619 && DECL_MODE (t) != BLKmode)
5620 TREE_ADDRESSABLE (t) = 1;
5623 *walk_subtrees = 0;
5626 return NULL_TREE;
5629 /* RTL expansion is not able to compile array references with variable
5630 offsets for arrays stored in a single register. Discover such
5631 expressions and mark the variables as addressable to avoid this
5632 scenario. */
5634 static void
5635 discover_nonconstant_array_refs (void)
5637 basic_block bb;
5638 gimple_stmt_iterator gsi;
5640 FOR_EACH_BB_FN (bb, cfun)
5641 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5643 gimple stmt = gsi_stmt (gsi);
5644 if (!is_gimple_debug (stmt))
5645 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
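/* Added illustrative sketch, not part of the original file: the kind
   of source construct this pass is after.  If S could be kept in a
   register, s.a[i] with a non-constant I could not be expanded, so
   the walk above marks S TREE_ADDRESSABLE and it lives in memory.  */
struct small_example { int a[2]; };

static int
variable_index_example (struct small_example s, int i)
{
  return s.a[i];	/* Non-constant index forces S into memory.  */
}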
5649 /* This function sets crtl->args.internal_arg_pointer to a virtual
5650 register if DRAP is needed. The local register allocator will replace
5651 virtual_incoming_args_rtx with the virtual register. */
5653 static void
5654 expand_stack_alignment (void)
5656 rtx drap_rtx;
5657 unsigned int preferred_stack_boundary;
5659 if (! SUPPORTS_STACK_ALIGNMENT)
5660 return;
5662 if (cfun->calls_alloca
5663 || cfun->has_nonlocal_label
5664 || crtl->has_nonlocal_goto)
5665 crtl->need_drap = true;
5667 /* Call update_stack_boundary here again to update incoming stack
5668 boundary. It may set incoming stack alignment to a different
5669 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5670 use the minimum incoming stack alignment to check if it is OK
5671 to perform sibcall optimization since sibcall optimization will
5672 only align the outgoing stack to incoming stack boundary. */
5673 if (targetm.calls.update_stack_boundary)
5674 targetm.calls.update_stack_boundary ();
5676 /* The incoming stack frame has to be aligned at least at
5677 parm_stack_boundary. */
5678 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5680 /* Update crtl->stack_alignment_estimated and use it later to align
5681 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5682 exceptions since callgraph doesn't collect incoming stack alignment
5683 in this case. */
5684 if (cfun->can_throw_non_call_exceptions
5685 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5686 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5687 else
5688 preferred_stack_boundary = crtl->preferred_stack_boundary;
5689 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5690 crtl->stack_alignment_estimated = preferred_stack_boundary;
5691 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5692 crtl->stack_alignment_needed = preferred_stack_boundary;
5694 gcc_assert (crtl->stack_alignment_needed
5695 <= crtl->stack_alignment_estimated);
5697 crtl->stack_realign_needed
5698 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5699 crtl->stack_realign_tried = crtl->stack_realign_needed;
5701 crtl->stack_realign_processed = true;
5703 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5704 alignment. */
5705 gcc_assert (targetm.calls.get_drap_rtx != NULL);
5706 drap_rtx = targetm.calls.get_drap_rtx ();
5708 /* stack_realign_drap and drap_rtx must match. */
5709 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5711 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5712 if (NULL != drap_rtx)
5714 crtl->args.internal_arg_pointer = drap_rtx;
5716 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5717 needed. */
5718 fixup_tail_calls ();
5723 static void
5724 expand_main_function (void)
5726 #if (defined(INVOKE__main) \
5727 || (!defined(HAS_INIT_SECTION) \
5728 && !defined(INIT_SECTION_ASM_OP) \
5729 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5730 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5731 #endif
5735 /* Expand code to initialize the stack_protect_guard. This is invoked at
5736 the beginning of a function to be protected. */
5738 #ifndef HAVE_stack_protect_set
5739 # define HAVE_stack_protect_set 0
5740 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5741 #endif
5743 static void
5744 stack_protect_prologue (void)
5746 tree guard_decl = targetm.stack_protect_guard ();
5747 rtx x, y;
5749 x = expand_normal (crtl->stack_protect_guard);
5750 y = expand_normal (guard_decl);
5752 /* Allow the target to copy from Y to X without leaking Y into a
5753 register. */
5754 if (HAVE_stack_protect_set)
5756 rtx insn = gen_stack_protect_set (x, y);
5757 if (insn)
5759 emit_insn (insn);
5760 return;
5764 /* Otherwise do a straight move. */
5765 emit_move_insn (x, y);
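/* Added illustrative sketch with hypothetical names, not part of the
   original file: in source terms, the prologue above amounts to
   copying the global guard value into a slot in the current frame;
   the matching epilogue check (emitted elsewhere) compares the two
   before returning.  */
extern unsigned long guard_value_example;	/* Stands in for the guard decl.  */

static void
stack_protect_prologue_example (unsigned long *frame_canary_slot)
{
  *frame_canary_slot = guard_value_example;	/* The x = y move above.  */
}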
5768 /* Translate the intermediate representation contained in the CFG
5769 from GIMPLE trees to RTL.
5771 We do conversion per basic block and preserve/update the tree CFG.
5772 This implies we have to do some magic as the CFG can simultaneously
5773 consist of basic blocks containing RTL and GIMPLE trees. This can
5774 confuse the CFG hooks, so be careful to not manipulate CFG during
5775 the expansion. */
5777 namespace {
5779 const pass_data pass_data_expand =
5781 RTL_PASS, /* type */
5782 "expand", /* name */
5783 OPTGROUP_NONE, /* optinfo_flags */
5784 TV_EXPAND, /* tv_id */
5785 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5786 | PROP_gimple_lcx
5787 | PROP_gimple_lvec ), /* properties_required */
5788 PROP_rtl, /* properties_provided */
5789 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5790 0, /* todo_flags_start */
5791 0, /* todo_flags_finish */
5794 class pass_expand : public rtl_opt_pass
5796 public:
5797 pass_expand (gcc::context *ctxt)
5798 : rtl_opt_pass (pass_data_expand, ctxt)
5801 /* opt_pass methods: */
5802 virtual unsigned int execute (function *);
5804 }; // class pass_expand
5806 unsigned int
5807 pass_expand::execute (function *fun)
5809 basic_block bb, init_block;
5810 sbitmap blocks;
5811 edge_iterator ei;
5812 edge e;
5813 rtx_insn *var_seq, *var_ret_seq;
5814 unsigned i;
5816 timevar_push (TV_OUT_OF_SSA);
5817 rewrite_out_of_ssa (&SA);
5818 timevar_pop (TV_OUT_OF_SSA);
5819 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
5821 /* Make sure all values used by the optimization passes have sane
5822 defaults. */
5823 reg_renumber = 0;
5825 /* Some backends want to know that we are expanding to RTL. */
5826 currently_expanding_to_rtl = 1;
5827 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5828 free_dominance_info (CDI_DOMINATORS);
5830 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
5832 if (chkp_function_instrumented_p (current_function_decl))
5833 chkp_reset_rtl_bounds ();
5835 insn_locations_init ();
5836 if (!DECL_IS_BUILTIN (current_function_decl))
5837 {
5838 /* Eventually, all FEs should explicitly set function_start_locus. */
5839 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
5840 set_curr_insn_location
5841 (DECL_SOURCE_LOCATION (current_function_decl));
5842 else
5843 set_curr_insn_location (fun->function_start_locus);
5844 }
5845 else
5846 set_curr_insn_location (UNKNOWN_LOCATION);
5847 prologue_location = curr_insn_location ();
5849 #ifdef INSN_SCHEDULING
5850 init_sched_attrs ();
5851 #endif
5853 /* Make sure first insn is a note even if we don't want linenums.
5854 This makes sure the first insn will never be deleted.
5855 Also, final expects a note to appear there. */
5856 emit_note (NOTE_INSN_DELETED);
5858 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5859 discover_nonconstant_array_refs ();
5861 targetm.expand_to_rtl_hook ();
5862 crtl->stack_alignment_needed = STACK_BOUNDARY;
5863 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
5864 crtl->stack_alignment_estimated = 0;
5865 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5866 fun->cfg->max_jumptable_ents = 0;
5868 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
5869 of the function section at expansion time to predict the distance of calls. */
5870 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5872 /* Expand the variables recorded during gimple lowering. */
5873 timevar_push (TV_VAR_EXPAND);
5874 start_sequence ();
5876 var_ret_seq = expand_used_vars ();
5878 var_seq = get_insns ();
5879 end_sequence ();
5880 timevar_pop (TV_VAR_EXPAND);
5882 /* Honor stack protection warnings. */
5883 if (warn_stack_protect)
5884 {
5885 if (fun->calls_alloca)
5886 warning (OPT_Wstack_protector,
5887 "stack protector not protecting local variables: "
5888 "variable length buffer");
5889 if (has_short_buffer && !crtl->stack_protect_guard)
5890 warning (OPT_Wstack_protector,
5891 "stack protector not protecting function: "
5892 "all local arrays are less than %d bytes long",
5893 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
5894 }
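/* Hypothetical user code that trips these warnings when compiled with
   -fstack-protector -Wstack-protector ('consume' is a placeholder):

       void f (int n) { char *p = __builtin_alloca (n); consume (p); }
       void g (void)  { char buf[4]; consume (buf); }

   f() warns because an alloca/VLA frame cannot be fully covered by the
   guard; g() warns when no local array reaches the
   --param ssp-buffer-size threshold (8 by default).  */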
5896 /* Set up parameters and prepare for return, for the function. */
5897 expand_function_start (current_function_decl);
5899 /* If we emitted any instructions for setting up the variables,
5900 emit them before the FUNCTION_START note. */
5901 if (var_seq)
5902 {
5903 emit_insn_before (var_seq, parm_birth_insn);
5905 /* In expand_function_end we'll insert the alloca save/restore
5906 before parm_birth_insn. We've just inserted an alloca call.
5907 Adjust the pointer to match. */
5908 parm_birth_insn = var_seq;
5909 }
5911 /* Now that we also have the parameter RTXs, copy them over to our
5912 partitions. */
5913 for (i = 0; i < SA.map->num_partitions; i++)
5914 {
5915 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
5917 if (TREE_CODE (var) != VAR_DECL
5918 && !SA.partition_to_pseudo[i])
5919 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
5920 gcc_assert (SA.partition_to_pseudo[i]);
5922 /* If this decl was marked as living in multiple places, reset
5923 this now to NULL. */
5924 if (DECL_RTL_IF_SET (var) == pc_rtx)
5925 SET_DECL_RTL (var, NULL);
5927 /* Some RTL parts really want to look at DECL_RTL(x) when x
5928 was a decl marked in REG_ATTR or MEM_ATTR. We could use
5929 SET_DECL_RTL here to make this available, but that would mean
5930 selecting one of the potentially many RTLs for one DECL. Instead
5931 of doing that we simply reset the MEM_EXPR of the RTL in question,
5932 so nobody can get at it and hence nobody can call DECL_RTL on it. */
5933 if (!DECL_RTL_SET_P (var))
5934 {
5935 if (MEM_P (SA.partition_to_pseudo[i]))
5936 set_mem_expr (SA.partition_to_pseudo[i], NULL);
5937 }
5938 }
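/* Sketch of the mapping completed by this loop (hypothetical names and
   numbers): if the SSA names i_1 and i_3 of user variable 'i' were
   coalesced into partition 2, then after this loop

       partition 2  ->  DECL_RTL of 'i', e.g. (reg:SI 58 [ i ])

   so every member of the partition expands to the same pseudo or
   stack slot.  */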
5940 /* If we have a class containing differently aligned pointers,
5941 we need to merge those into the corresponding RTL pointer
5942 alignment. */
5943 for (i = 1; i < num_ssa_names; i++)
5944 {
5945 tree name = ssa_name (i);
5946 int part;
5947 rtx r;
5949 if (!name
5950 /* We might have generated new SSA names in
5951 update_alias_info_with_stack_vars. They will have a NULL
5952 defining statement, and won't be part of the partitioning,
5953 so ignore those. */
5954 || !SSA_NAME_DEF_STMT (name))
5955 continue;
5956 part = var_to_partition (SA.map, name);
5957 if (part == NO_PARTITION)
5958 continue;
5960 /* Adjust all partition members to get the underlying decl of
5961 the representative which we might have created in expand_one_var. */
5962 if (SSA_NAME_VAR (name) == NULL_TREE)
5963 {
5964 tree leader = partition_to_var (SA.map, part);
5965 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
5966 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
5967 }
5968 if (!POINTER_TYPE_P (TREE_TYPE (name)))
5969 continue;
5971 r = SA.partition_to_pseudo[part];
5972 if (REG_P (r))
5973 mark_reg_pointer (r, get_pointer_alignment (name));
5974 }
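/* Sketch (hypothetical values): if p_1 with known 16-byte alignment and
   q_2 with known 4-byte alignment share a partition, the shared pseudo
   may only be assumed to have the weakest alignment of its members.
   mark_reg_pointer records alignment in bits and keeps the smallest
   value it has seen, so after

       mark_reg_pointer (r, 128);
       mark_reg_pointer (r, 32);

   the pseudo is treated as only 4-byte aligned.  */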
5976 /* If this function is `main', emit a call to `__main'
5977 to run global initializers, etc. */
5978 if (DECL_NAME (current_function_decl)
5979 && MAIN_NAME_P (DECL_NAME (current_function_decl))
5980 && DECL_FILE_SCOPE_P (current_function_decl))
5981 expand_main_function ();
5983 /* Initialize the stack_protect_guard field. This must happen after the
5984 call to __main (if any) so that the external decl is initialized. */
5985 if (crtl->stack_protect_guard)
5986 stack_protect_prologue ();
5988 expand_phi_nodes (&SA);
5990 /* Register rtl specific functions for cfg. */
5991 rtl_register_cfg_hooks ();
5993 init_block = construct_init_block ();
5995 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
5996 remaining edges later. */
5997 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
5998 e->flags &= ~EDGE_EXECUTABLE;
6000 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6001 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6002 next_bb)
6003 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
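/* Note: expand_gimple_basic_block can split the block it is given
   (e.g. when expanding a tail call) and returns the last block it
   produced, so reassigning 'bb' here keeps the walk from revisiting
   freshly created blocks.  */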
6005 if (MAY_HAVE_DEBUG_INSNS)
6006 expand_debug_locations ();
6008 /* Free stuff we no longer need after GIMPLE optimizations. */
6009 free_dominance_info (CDI_DOMINATORS);
6010 free_dominance_info (CDI_POST_DOMINATORS);
6011 delete_tree_cfg_annotations ();
6013 timevar_push (TV_OUT_OF_SSA);
6014 finish_out_of_ssa (&SA);
6015 timevar_pop (TV_OUT_OF_SSA);
6017 timevar_push (TV_POST_EXPAND);
6018 /* We are no longer in SSA form. */
6019 fun->gimple_df->in_ssa_p = false;
6020 loops_state_clear (LOOP_CLOSED_SSA);
6022 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6023 conservatively to true until they are all profile aware. */
6024 delete lab_rtx_for_bb;
6025 free_histograms ();
6027 construct_exit_block ();
6028 insn_locations_finalize ();
6030 if (var_ret_seq)
6031 {
6032 rtx_insn *after = return_label;
6033 rtx_insn *next = NEXT_INSN (after);
6034 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6035 after = next;
6036 emit_insn_after (var_ret_seq, after);
6037 }
6039 /* Zap the tree EH table. */
6040 set_eh_throw_stmt_table (fun, NULL);
6042 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6043 to split edges, which the edge insertions below might do. */
6044 rebuild_jump_labels (get_insns ());
6046 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6047 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6048 {
6049 edge e;
6050 edge_iterator ei;
6051 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6052 {
6053 if (e->insns.r)
6054 {
6055 rebuild_jump_labels_chain (e->insns.r);
6056 /* Put insns after parm birth, but before
6057 NOTE_INSNS_FUNCTION_BEG. */
6058 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6059 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6060 {
6061 rtx_insn *insns = e->insns.r;
6062 e->insns.r = NULL;
6063 if (NOTE_P (parm_birth_insn)
6064 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6065 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6066 else
6067 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6068 }
6069 else
6070 commit_one_edge_insertion (e);
6071 }
6072 else
6073 ei_next (&ei);
6074 }
6075 }
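/* Sketch of the intended insn order on the entry edge (hedged; the
   exact notes vary by target and options):

       ... parameter setup insns ...    <- parm_birth_insn
       <insns queued on the entry edge>
       NOTE_INSN_FUNCTION_BEG
       ... function body ...

   i.e. edge insns execute after the parameters are live but before the
   point debuggers treat as the start of the body.  */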
6077 /* We're done expanding trees to RTL. */
6078 currently_expanding_to_rtl = 0;
6080 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6081 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6082 {
6083 edge e;
6084 edge_iterator ei;
6085 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6086 {
6087 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6088 e->flags &= ~EDGE_EXECUTABLE;
6090 /* At the moment not all abnormal edges match the RTL
6091 representation. It is safe to remove them here as
6092 find_many_sub_basic_blocks will rediscover them.
6093 In the future we should get this fixed properly. */
6094 if ((e->flags & EDGE_ABNORMAL)
6095 && !(e->flags & EDGE_SIBCALL))
6096 remove_edge (e);
6097 else
6098 ei_next (&ei);
6099 }
6100 }
6102 blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
6103 bitmap_ones (blocks);
6104 find_many_sub_basic_blocks (blocks);
6105 sbitmap_free (blocks);
6106 purge_all_dead_edges ();
6108 expand_stack_alignment ();
6110 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6111 function. */
6112 if (crtl->tail_call_emit)
6113 fixup_tail_calls ();
6115 /* After initial rtl generation, call back to finish generating
6116 exception support code. We need to do this before cleaning up
6117 the CFG as the code does not expect dead landing pads. */
6118 if (fun->eh->region_tree != NULL)
6119 finish_eh_generation ();
6121 /* Remove unreachable blocks; otherwise we cannot compute dominators,
6122 which are needed for loop state verification. As a side effect
6123 this also compacts blocks.
6124 ??? We cannot remove trivially dead insns here as for example
6125 the DRAP reg on i?86 is not magically live at this point.
6126 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6127 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6129 #ifdef ENABLE_CHECKING
6130 verify_flow_info ();
6131 #endif
6133 /* Initialize pseudos allocated for hard registers. */
6134 emit_initial_value_sets ();
6136 /* And finally unshare all RTL. */
6137 unshare_all_rtl ();
6139 /* There's no need to defer outputting this function any more; we
6140 know we want to output it. */
6141 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6143 /* Now that we're done expanding trees to RTL, we shouldn't have any
6144 more CONCATs anywhere. */
6145 generating_concat_p = 0;
6147 if (dump_file)
6148 {
6149 fprintf (dump_file,
6150 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6151 /* And the pass manager will dump RTL for us. */
6152 }
6154 /* If we're emitting a nested function, make sure its parent gets
6155 emitted as well. Doing otherwise confuses debug info. */
6156 {
6157 tree parent;
6158 for (parent = DECL_CONTEXT (current_function_decl);
6159 parent != NULL_TREE;
6160 parent = get_containing_scope (parent))
6161 if (TREE_CODE (parent) == FUNCTION_DECL)
6162 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6163 }
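/* Example (GNU C, hypothetical): given

       void outer (void)
       {
         void inner (void) { ... }
         ...
       }

   expanding 'inner' marks 'outer' as symbol-referenced here so the
   parent is emitted as well and the nested debug info stays
   consistent.  */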
6165 /* We are now committed to emitting code for this function. Do any
6166 preparation, such as emitting abstract debug info for the inline
6167 function, before it gets mangled by optimization. */
6168 if (cgraph_function_possibly_inlined_p (current_function_decl))
6169 (*debug_hooks->outlining_inline_function) (current_function_decl);
6171 TREE_ASM_WRITTEN (current_function_decl) = 1;
6173 /* After expanding, the return labels are no longer needed. */
6174 return_label = NULL;
6175 naked_return_label = NULL;
6177 /* After expanding, the tm_restart map is no longer needed. */
6178 if (fun->gimple_df->tm_restart)
6179 fun->gimple_df->tm_restart = NULL;
6181 /* Tag the blocks with a depth number so that change_scope can find
6182 the common parent easily. */
6183 set_block_levels (DECL_INITIAL (fun->decl), 0);
6184 default_rtl_profile ();
6186 timevar_pop (TV_POST_EXPAND);
6188 return 0;
6189 }
6191 } // anon namespace
6193 rtl_opt_pass *
6194 make_pass_expand (gcc::context *ctxt)
6195 {
6196 return new pass_expand (ctxt);
6197 }
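/* Usage note: the pass manager calls this factory when building the
   pass pipeline; pass_expand is listed in passes.def (as
   "NEXT_PASS (pass_expand)"), which is what schedules it between the
   GIMPLE and RTL passes.  */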