/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "tree.h"
#include "stringpool.h"
#include "varasm.h"
#include "stor-layout.h"
#include "stmt.h"
#include "print-tree.h"
#include "tm_p.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "insn-codes.h"
#include "optabs.h"
#include "expr.h"
#include "langhooks.h"
#include "bitmap.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "sbitmap.h"
#include "cfgloop.h"
#include "regs.h" /* For reg_renumber.  */
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "asan.h"
#include "tree-ssa-address.h"
#include "recog.h"
#include "output.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#ifndef NAME__MAIN
#define NAME__MAIN "__main"
#endif
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
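
/* Editor's illustrative sketch (not part of the original source): a
   standalone model of the FRAME_GROWS_DOWNWARD branch above, kept behind
   #if 0 so it cannot affect the build.  All names here are hypothetical.  */
#if 0
static long
model_alloc_downward (long cur_offset, long size, long align, long phase)
{
  /* Subtract the size (and phase), round down to a multiple of ALIGN
     via the two's-complement mask, then re-apply the phase.  */
  long off = cur_offset;
  off -= size + phase;
  off &= -align;
  off += phase;
  /* E.g. model_alloc_downward (-4, 10, 8, 0) == -16: ten bytes only fit
     below offset -4 once rounded down to the next 8-byte boundary.  */
  return off;
}
#endif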
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (TREE_CODE (lhs) != VAR_DECL)
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  struct stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
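
/* Editor's illustrative sketch (not part of the original source): the loop
   above is the classic iterate-to-a-fixed-point dataflow schema.  Below is
   a deliberately simplified standalone model over word-sized bitsets; it
   ignores the statement-by-statement interleaving of mentions and clobbers
   that add_scope_conflicts_1 performs.  Kept behind #if 0; all names are
   hypothetical.  */
#if 0
#define MODEL_MAX_PREDS 4
static void
model_active_fixpoint (int nblocks, const int npreds[],
                       const int pred[][MODEL_MAX_PREDS],
                       const unsigned mention[], const unsigned clobber[],
                       unsigned active[])
{
  bool changed = true;
  while (changed)
    {
      changed = false;
      for (int b = 0; b < nblocks; b++)
        {
          /* Union of the predecessors' active-at-end sets...  */
          unsigned work = 0;
          for (int p = 0; p < npreds[b]; p++)
            work |= active[pred[b][p]];
          /* ...plus names mentioned here, minus names clobbered here.  */
          work = (work | mention[b]) & ~clobber[b];
          if (work != active[b])
            {
              active[b] = work;
              changed = true;
            }
        }
    }
}
#endif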
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
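
/* Editor's illustrative note (not part of the original source): assuming a
   hypothetical MAX_SUPPORTED_STACK_ALIGNMENT of 128 bits, three variables
   A (alignb 64, size 8), B (alignb 8, size 32) and C (alignb 8, size 8)
   sort as A, B, C: A's 512-bit alignment is "large" so it comes first, and
   among the rest B's bigger size wins; ties would fall through to the
   alignment compare and finally to the UID/version compare.  */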
struct part_traits : default_hashmap_traits
{
  template<typename T>
    static bool
    is_deleted (T &e)
    { return e.m_value == reinterpret_cast<void *> (1); }

  template<typename T> static bool is_empty (T &e) { return e.m_value == NULL; }
  template<typename T>
    static void
    mark_deleted (T &e)
    { e.m_value = reinterpret_cast<T> (1); }

  template<typename T>
    static void
    mark_empty (T &e)
    { e.m_value = NULL; }
};
typedef hash_map<size_t, bitmap, part_traits> part_hashmap;

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
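
/* Editor's illustrative note (not part of the original source): the splice
   above is O(1).  If A's partition chain is A -> X -> EOC and B is a
   singleton, union_stack_vars (A, B) yields A -> B -> X -> EOC and sets B's
   representative to A; B's conflict bitmap is folded into A's and freed.  */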
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          HOST_WIDE_INT jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
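
/* Editor's illustrative note (not part of the original source): consider
   three equal-alignment variables X (size 16), Y (size 8) and Z (size 8)
   where X conflicts with Y, Y conflicts with Z, and X and Z are compatible.
   Sorted order is X, Y, Z.  The pass for X skips Y (they conflict) and
   unions Z into X; the pass for Y then finds Z already merged.  The result
   is partitions {X, Z} and {Y}, so X and Z share one stack slot.  */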
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
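
/* Editor's illustrative note (not part of the original source): the idiom
   "offset & -offset" above isolates the lowest set bit, i.e. the largest
   power of two dividing OFFSET, which is the alignment the chosen placement
   actually guarantees.  For example, offset 24 (binary 11000) gives
   24 & -24 == 8, hence an 8-byte guarantee.  */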
struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is reversed; highest-offset pairs come first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
            {
              HOST_WIDE_INT prev_offset = frame_offset;
              tree repr_decl = NULL_TREE;

              offset
                = alloc_stack_frame_space (stack_vars[i].size
                                           + ASAN_RED_ZONE_SIZE,
                                           MAX (alignb, ASAN_RED_ZONE_SIZE));
              data->asan_vec.safe_push (prev_offset);
              data->asan_vec.safe_push (offset + stack_vars[i].size);
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);
              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        expand_one_stack_var_at (stack_vars[j].decl,
                                 base, base_align,
                                 offset);
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  Similarly for
         SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        {
          expand_one_hard_reg_var (var);
          if (!DECL_HARD_REGISTER (var))
            /* Invalid register specification.  */
            expand_one_error_var (var);
        }
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8
static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
            len = max;
          else
            len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
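
/* Editor's illustrative note (not part of the original source): under
   -fstack-protector-all or -fstack-protector-strong, "char buf[64]"
   classifies as a character array and lands in phase 1 (closest to the
   guard), "int v[16]" is a non-character array and lands in phase 2, and a
   plain scalar stays in phase 0.  A struct containing a char array has
   SPCT_HAS_AGGREGATE set, so it falls into phase 2 rather than phase 1.  */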
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}
/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
        tree field_type = TREE_TYPE (f);
        if (RECORD_OR_UNION_TYPE_P (field_type)
            && record_or_union_type_has_array_p (field_type))
          return 1;
        if (TREE_CODE (field_type) == ARRAY_TYPE)
          return 1;
      }
  return 0;
}
/* Check if the current function has local referenced variables that
   have their addresses taken, contain an array, or are arrays.  */

static bool
stack_protect_decl_p ()
{
  unsigned i;
  tree var;

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    if (!is_global_var (var))
      {
        tree var_type = TREE_TYPE (var);
        if (TREE_CODE (var) == VAR_DECL
            && (TREE_CODE (var_type) == ARRAY_TYPE
                || TREE_ADDRESSABLE (var)
                || (RECORD_OR_UNION_TYPE_P (var_type)
                    && record_or_union_type_has_array_p (var_type))))
          return true;
      }
  return false;
}
/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
         !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        /* This assumes that calls to internal-only functions never
           use a return slot.  */
        if (is_gimple_call (stmt)
            && !gimple_call_internal_p (stmt)
            && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
                                  gimple_call_fndecl (stmt)))
          return true;
      }
  return false;
}
/* Expand all variables used in the function.  */

static rtx_insn *
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  vec<tree> maybe_local_decls = vNULL;
  rtx_insn *var_end_seq = NULL;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
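
  /* Editor's illustrative note (not part of the original source): on a
     hypothetical target with PREFERRED_STACK_BOUNDARY of 128 bits
     (align == 16) and STARTING_FRAME_OFFSET of 8, off == 8 and
     frame_phase == 8, recording that virtual_stack_vars_rtx sits 8 bytes
     past a 16-byte boundary, so (frame_offset + frame_phase) stays a
     multiple of 16 as the earlier comment on frame_phase requires.  */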
1717 /* Set TREE_USED on all variables in the local_decls. */
1718 FOR_EACH_LOCAL_DECL (cfun, i, var)
1719 TREE_USED (var) = 1;
1720 /* Clear TREE_USED on all variables associated with a block scope. */
1721 clear_tree_used (DECL_INITIAL (current_function_decl));
1723 init_vars_expansion ();
1725 hash_map<tree, tree> ssa_name_decls;
1726 for (i = 0; i < SA.map->num_partitions; i++)
1728 tree var = partition_to_var (SA.map, i);
1730 gcc_assert (!virtual_operand_p (var));
1732 /* Assign decls to each SSA name partition, share decls for partitions
1733 we could have coalesced (those with the same type). */
1734 if (SSA_NAME_VAR (var) == NULL_TREE)
1736 tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
1737 if (!*slot)
1738 *slot = create_tmp_reg (TREE_TYPE (var), NULL);
1739 replace_ssa_name_symbol (var, *slot);
1742 /* Always allocate space for partitions based on VAR_DECLs. But for
1743 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1744 debug info, there is no need to do so if optimization is disabled
1745 because all the SSA_NAMEs based on these DECLs have been coalesced
1746 into a single partition, which is thus assigned the canonical RTL
1747 location of the DECLs. If in_lto_p, we can't rely on optimize,
1748 a function could be compiled with -O1 -flto first and only the
1749 link performed at -O0. */
1750 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1751 expand_one_var (var, true, true);
1752 else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
1754 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1755 contain the default def (representing the parm or result itself)
1756 we don't do anything here. But those which don't contain the
1757 default def (representing a temporary based on the parm/result)
1758 we need to allocate space just like for normal VAR_DECLs. */
1759 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1761 expand_one_var (var, true, true);
1762 gcc_assert (SA.partition_to_pseudo[i]);
1767 if (flag_stack_protect == SPCT_FLAG_STRONG)
1768 gen_stack_protect_signal
1769 = stack_protect_decl_p () || stack_protect_return_slot_p ();
1771 /* At this point all variables on the local_decls with TREE_USED
1772 set are not associated with any block scope. Lay them out. */
1774 len = vec_safe_length (cfun->local_decls);
1775 FOR_EACH_LOCAL_DECL (cfun, i, var)
1777 bool expand_now = false;
1779 /* Expanded above already. */
1780 if (is_gimple_reg (var))
1782 TREE_USED (var) = 0;
1783 goto next;
1785 /* We didn't set a block for static or extern because it's hard
1786 to tell the difference between a global variable (re)declared
1787 in a local scope, and one that's really declared there to
1788 begin with. And it doesn't really matter much, since we're
1789 not giving them stack space. Expand them now. */
1790 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1791 expand_now = true;
1793 /* Expand variables not associated with any block now. Those created by
1794 the optimizers could be live anywhere in the function. Those that
1795 could possibly have been scoped originally and detached from their
1796 block will have their allocation deferred so we coalesce them with
1797 others when optimization is enabled. */
1798 else if (TREE_USED (var))
1799 expand_now = true;
1801 /* Finally, mark all variables on the list as used. We'll use
1802 this in a moment when we expand those associated with scopes. */
1803 TREE_USED (var) = 1;
1805 if (expand_now)
1806 expand_one_var (var, true, true);
1808 next:
1809 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1811 rtx rtl = DECL_RTL_IF_SET (var);
1813 /* Keep artificial non-ignored vars in cfun->local_decls
1814 chain until instantiate_decls. */
1815 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1816 add_local_decl (cfun, var);
1817 else if (rtl == NULL_RTX)
1818 /* If rtl isn't set yet, which can happen e.g. with
1819 -fstack-protector, retry before returning from this
1820 function. */
1821 maybe_local_decls.safe_push (var);
1825 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1827 +-----------------+-----------------+
1828 | ...processed... | ...duplicates...|
1829 +-----------------+-----------------+
1831 +-- LEN points here.
1833 We just want the duplicates, as those are the artificial
1834 non-ignored vars that we want to keep until instantiate_decls.
1835 Move them down and truncate the array. */
1836 if (!vec_safe_is_empty (cfun->local_decls))
1837 cfun->local_decls->block_remove (0, len);
1839 /* At this point, all variables within the block tree with TREE_USED
1840 set are actually used by the optimized function. Lay them out. */
1841 expand_used_vars_for_block (outer_block, true);
1843 if (stack_vars_num > 0)
1845 add_scope_conflicts ();
1847 /* If stack protection is enabled, we don't share space between
1848 vulnerable data and non-vulnerable data. */
1849 if (flag_stack_protect)
1850 add_stack_protection_conflicts ();
1852 /* Now that we have collected all stack variables, and have computed a
1853 minimal interference graph, attempt to save some stack space. */
1854 partition_stack_vars ();
1855 if (dump_file)
1856 dump_stack_var_partition ();
1859 switch (flag_stack_protect)
1861 case SPCT_FLAG_ALL:
1862 create_stack_guard ();
1863 break;
1865 case SPCT_FLAG_STRONG:
1866 if (gen_stack_protect_signal
1867 || cfun->calls_alloca || has_protected_decls)
1868 create_stack_guard ();
1869 break;
1871 case SPCT_FLAG_DEFAULT:
1872 if (cfun->calls_alloca || has_protected_decls)
1873 create_stack_guard ();
1874 break;
1876 default:
1880 /* Assign rtl to each variable based on these partitions. */
1881 if (stack_vars_num > 0)
1883 struct stack_vars_data data;
1885 data.asan_vec = vNULL;
1886 data.asan_decl_vec = vNULL;
1887 data.asan_base = NULL_RTX;
1888 data.asan_alignb = 0;
1890 /* Reorder decls to be protected by iterating over the variables
1891 array multiple times, and allocating out of each phase in turn. */
1892 /* ??? We could probably integrate this into the qsort we did
1893 earlier, such that we naturally see these variables first,
1894 and thus naturally allocate things in the right order. */
1895 if (has_protected_decls)
1897 /* Phase 1 contains only character arrays. */
1898 expand_stack_vars (stack_protect_decl_phase_1, &data);
1900 /* Phase 2 contains other kinds of arrays. */
1901 if (flag_stack_protect == 2)
1902 expand_stack_vars (stack_protect_decl_phase_2, &data);
1905 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
1906 /* Phase 3, any partitions that need asan protection
1907 in addition to phase 1 and 2. */
1908 expand_stack_vars (asan_decl_phase_3, &data);
1910 if (!data.asan_vec.is_empty ())
1912 HOST_WIDE_INT prev_offset = frame_offset;
1913 HOST_WIDE_INT offset, sz, redzonesz;
1914 redzonesz = ASAN_RED_ZONE_SIZE;
1915 sz = data.asan_vec[0] - prev_offset;
1916 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
1917 && data.asan_alignb <= 4096
1918 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
1919 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
1920 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
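/* Illustrative worked example (hypothetical numbers, assuming the
   usual ASAN_RED_ZONE_SIZE of 32): with sz = 40 bytes of variables
   and data.asan_alignb = 64, the guard above holds and
   redzonesz = ((40 + 32 + 63) & ~63) - 40 = 128 - 40 = 88,
   i.e. the red zone is grown so that 40 + 88 lands on the next
   64-byte-aligned boundary. */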
1921 offset
1922 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
1923 data.asan_vec.safe_push (prev_offset);
1924 data.asan_vec.safe_push (offset);
1925 /* Leave space for alignment if STRICT_ALIGNMENT. */
1926 if (STRICT_ALIGNMENT)
1927 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
1928 << ASAN_SHADOW_SHIFT)
1929 / BITS_PER_UNIT, 1);
1931 var_end_seq
1932 = asan_emit_stack_protection (virtual_stack_vars_rtx,
1933 data.asan_base,
1934 data.asan_alignb,
1935 data.asan_vec.address (),
1936 data.asan_decl_vec.address (),
1937 data.asan_vec.length ());
1940 expand_stack_vars (NULL, &data);
1942 data.asan_vec.release ();
1943 data.asan_decl_vec.release ();
1946 fini_vars_expansion ();
1948 /* If there were any artificial non-ignored vars without rtl
1949 found earlier, see if deferred stack allocation hasn't assigned
1950 rtl to them. */
1951 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
1953 rtx rtl = DECL_RTL_IF_SET (var);
1955 /* Keep artificial non-ignored vars in cfun->local_decls
1956 chain until instantiate_decls. */
1957 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1958 add_local_decl (cfun, var);
1960 maybe_local_decls.release ();
1962 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1963 if (STACK_ALIGNMENT_NEEDED)
1965 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1966 if (!FRAME_GROWS_DOWNWARD)
1967 frame_offset += align - 1;
1968 frame_offset &= -align;
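/* Illustrative example (hypothetical target values): with a
   PREFERRED_STACK_BOUNDARY of 128 bits, align is 16 bytes. For a
   downward-growing frame with frame_offset == -40, the masking
   rounds toward minus infinity: -40 & -16 == -48. For an
   upward-growing frame, adding align - 1 first turns the same mask
   into a round-up: (40 + 15) & -16 == 48. */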
1971 return var_end_seq;
1975 /* If we need to produce a detailed dump, print the tree representation
1976 for STMT to the dump file. SINCE is the last RTX after which the RTL
1977 generated for STMT should have been appended. */
1979 static void
1980 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
1982 if (dump_file && (dump_flags & TDF_DETAILS))
1984 fprintf (dump_file, "\n;; ");
1985 print_gimple_stmt (dump_file, stmt, 0,
1986 TDF_SLIM | (dump_flags & TDF_LINENO));
1987 fprintf (dump_file, "\n");
1989 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1993 /* Maps the blocks that do not contain tree labels to rtx labels. */
1995 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
1997 /* Returns the label_rtx expression for a label starting basic block BB. */
1999 static rtx
2000 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2002 gimple_stmt_iterator gsi;
2003 tree lab;
2004 gimple lab_stmt;
2006 if (bb->flags & BB_RTL)
2007 return block_label (bb);
2009 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2010 if (elt)
2011 return *elt;
2013 /* Find the tree label if it is present. */
2015 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2017 lab_stmt = gsi_stmt (gsi);
2018 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
2019 break;
2021 lab = gimple_label_label (lab_stmt);
2022 if (DECL_NONLOCAL (lab))
2023 break;
2025 return label_rtx (lab);
2028 rtx_code_label *l = gen_label_rtx ();
2029 lab_rtx_for_bb->put (bb, l);
2030 return l;
2034 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2035 of a basic block where we just expanded the conditional at the end,
2036 possibly clean up the CFG and instruction sequence. LAST is the
2037 last instruction before the just emitted jump sequence. */
2039 static void
2040 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2042 /* Special case: when jumpif decides that the condition is
2043 trivial it emits an unconditional jump (and the necessary
2044 barrier). But we still have two edges, the fallthru one is
2045 wrong. purge_dead_edges would clean this up later. Unfortunately
2046 we have to insert insns (and split edges) before
2047 find_many_sub_basic_blocks and hence before purge_dead_edges.
2048 But splitting edges might create new blocks which depend on the
2049 fact that if there are two edges there's no barrier. So the
2050 barrier would get lost and verify_flow_info would ICE. Instead
2051 of auditing all edge splitters to care for the barrier (which
2052 normally isn't there in a cleaned CFG), fix it here. */
2053 if (BARRIER_P (get_last_insn ()))
2055 rtx_insn *insn;
2056 remove_edge (e);
2057 /* Now we have a single successor block; if we have insns to
2058 insert on the remaining edge, we will potentially insert
2059 them at the end of this block (if the dest block isn't feasible)
2060 in order to avoid splitting the edge. This insertion will take
2061 place in front of the last jump. But we might have emitted
2062 multiple jumps (conditional and one unconditional) to the
2063 same destination. Inserting in front of the last one then
2064 is a problem. See PR 40021. We fix this by deleting all
2065 jumps except the last unconditional one. */
2066 insn = PREV_INSN (get_last_insn ());
2067 /* Make sure we have an unconditional jump. Otherwise we're
2068 confused. */
2069 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2070 for (insn = PREV_INSN (insn); insn != last;)
2072 insn = PREV_INSN (insn);
2073 if (JUMP_P (NEXT_INSN (insn)))
2075 if (!any_condjump_p (NEXT_INSN (insn)))
2077 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2078 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2080 delete_insn (NEXT_INSN (insn));
2086 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2087 Returns a new basic block if we've terminated the current basic
2088 block and created a new one. */
2090 static basic_block
2091 expand_gimple_cond (basic_block bb, gimple stmt)
2093 basic_block new_bb, dest;
2094 edge new_edge;
2095 edge true_edge;
2096 edge false_edge;
2097 rtx_insn *last2, *last;
2098 enum tree_code code;
2099 tree op0, op1;
2101 code = gimple_cond_code (stmt);
2102 op0 = gimple_cond_lhs (stmt);
2103 op1 = gimple_cond_rhs (stmt);
2104 /* We're sometimes presented with such code:
2105 D.123_1 = x < y;
2106 if (D.123_1 != 0)
2108 This would expand to two comparisons which then later might
2109 be cleaned up by combine. But some pattern matchers like if-conversion
2110 work better when there's only one compare, so make up for this
2111 here as a special exception if TER would have made the same change. */
2112 if (SA.values
2113 && TREE_CODE (op0) == SSA_NAME
2114 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2115 && TREE_CODE (op1) == INTEGER_CST
2116 && ((gimple_cond_code (stmt) == NE_EXPR
2117 && integer_zerop (op1))
2118 || (gimple_cond_code (stmt) == EQ_EXPR
2119 && integer_onep (op1)))
2120 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2122 gimple second = SSA_NAME_DEF_STMT (op0);
2123 if (gimple_code (second) == GIMPLE_ASSIGN)
2125 enum tree_code code2 = gimple_assign_rhs_code (second);
2126 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2128 code = code2;
2129 op0 = gimple_assign_rhs1 (second);
2130 op1 = gimple_assign_rhs2 (second);
2132 /* If jumps are cheap turn some more codes into
2133 jumpy sequences. */
2134 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
2136 if ((code2 == BIT_AND_EXPR
2137 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2138 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2139 || code2 == TRUTH_AND_EXPR)
2141 code = TRUTH_ANDIF_EXPR;
2142 op0 = gimple_assign_rhs1 (second);
2143 op1 = gimple_assign_rhs2 (second);
2145 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2147 code = TRUTH_ORIF_EXPR;
2148 op0 = gimple_assign_rhs1 (second);
2149 op1 = gimple_assign_rhs2 (second);
2155 last2 = last = get_last_insn ();
2157 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2158 set_curr_insn_location (gimple_location (stmt));
2160 /* These flags have no purpose in RTL land. */
2161 true_edge->flags &= ~EDGE_TRUE_VALUE;
2162 false_edge->flags &= ~EDGE_FALSE_VALUE;
2164 /* We can either have a pure conditional jump with one fallthru edge or
2165 a two-way jump that needs to be decomposed into two basic blocks. */
2166 if (false_edge->dest == bb->next_bb)
2168 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2169 true_edge->probability);
2170 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2171 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2172 set_curr_insn_location (true_edge->goto_locus);
2173 false_edge->flags |= EDGE_FALLTHRU;
2174 maybe_cleanup_end_of_block (false_edge, last);
2175 return NULL;
2177 if (true_edge->dest == bb->next_bb)
2179 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2180 false_edge->probability);
2181 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2182 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2183 set_curr_insn_location (false_edge->goto_locus);
2184 true_edge->flags |= EDGE_FALLTHRU;
2185 maybe_cleanup_end_of_block (true_edge, last);
2186 return NULL;
2189 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2190 true_edge->probability);
2191 last = get_last_insn ();
2192 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2193 set_curr_insn_location (false_edge->goto_locus);
2194 emit_jump (label_rtx_for_bb (false_edge->dest));
2196 BB_END (bb) = last;
2197 if (BARRIER_P (BB_END (bb)))
2198 BB_END (bb) = PREV_INSN (BB_END (bb));
2199 update_bb_for_insn (bb);
2201 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2202 dest = false_edge->dest;
2203 redirect_edge_succ (false_edge, new_bb);
2204 false_edge->flags |= EDGE_FALLTHRU;
2205 new_bb->count = false_edge->count;
2206 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2207 add_bb_to_loop (new_bb, bb->loop_father);
2208 new_edge = make_edge (new_bb, dest, 0);
2209 new_edge->probability = REG_BR_PROB_BASE;
2210 new_edge->count = new_bb->count;
2211 if (BARRIER_P (BB_END (new_bb)))
2212 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2213 update_bb_for_insn (new_bb);
2215 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2217 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2219 set_curr_insn_location (true_edge->goto_locus);
2220 true_edge->goto_locus = curr_insn_location ();
2223 return new_bb;
2226 /* Mark all calls that can have a transaction restart. */
2228 static void
2229 mark_transaction_restart_calls (gimple stmt)
2231 struct tm_restart_node dummy;
2232 void **slot;
2234 if (!cfun->gimple_df->tm_restart)
2235 return;
2237 dummy.stmt = stmt;
2238 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2239 if (slot)
2241 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2242 tree list = n->label_or_list;
2243 rtx_insn *insn;
2245 for (insn = next_real_insn (get_last_insn ());
2246 !CALL_P (insn);
2247 insn = next_real_insn (insn))
2248 continue;
2250 if (TREE_CODE (list) == LABEL_DECL)
2251 add_reg_note (insn, REG_TM, label_rtx (list));
2252 else
2253 for (; list ; list = TREE_CHAIN (list))
2254 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2258 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2259 statement STMT. */
2261 static void
2262 expand_call_stmt (gimple stmt)
2264 tree exp, decl, lhs;
2265 bool builtin_p;
2266 size_t i;
2268 if (gimple_call_internal_p (stmt))
2270 expand_internal_call (stmt);
2271 return;
2274 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2276 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2277 decl = gimple_call_fndecl (stmt);
2278 builtin_p = decl && DECL_BUILT_IN (decl);
2280 /* If this is not a builtin function, the function type through which the
2281 call is made may be different from the type of the function. */
2282 if (!builtin_p)
2283 CALL_EXPR_FN (exp)
2284 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2285 CALL_EXPR_FN (exp));
2287 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2288 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2290 for (i = 0; i < gimple_call_num_args (stmt); i++)
2292 tree arg = gimple_call_arg (stmt, i);
2293 gimple def;
2294 /* TER addresses into arguments of builtin functions so we have a
2295 chance to infer more correct alignment information. See PR39954. */
2296 if (builtin_p
2297 && TREE_CODE (arg) == SSA_NAME
2298 && (def = get_gimple_for_ssa_name (arg))
2299 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2300 arg = gimple_assign_rhs1 (def);
2301 CALL_EXPR_ARG (exp, i) = arg;
2304 if (gimple_has_side_effects (stmt))
2305 TREE_SIDE_EFFECTS (exp) = 1;
2307 if (gimple_call_nothrow_p (stmt))
2308 TREE_NOTHROW (exp) = 1;
2310 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2311 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2312 if (decl
2313 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2314 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2315 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2316 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2317 else
2318 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2319 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2320 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2321 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2323 /* Ensure RTL is created for debug args. */
2324 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2326 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2327 unsigned int ix;
2328 tree dtemp;
2330 if (debug_args)
2331 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2333 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2334 expand_debug_expr (dtemp);
2338 lhs = gimple_call_lhs (stmt);
2339 if (lhs)
2340 expand_assignment (lhs, exp, false);
2341 else
2342 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2344 mark_transaction_restart_calls (stmt);
2348 /* Generate RTL for an asm statement (explicit assembler code).
2349 STRING is a STRING_CST node containing the assembler code text,
2350 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2351 insn is volatile; don't optimize it. */
2353 static void
2354 expand_asm_loc (tree string, int vol, location_t locus)
2356 rtx body;
2358 if (TREE_CODE (string) == ADDR_EXPR)
2359 string = TREE_OPERAND (string, 0);
2361 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2362 ggc_strdup (TREE_STRING_POINTER (string)),
2363 locus);
2365 MEM_VOLATILE_P (body) = vol;
2367 emit_insn (body);
2370 /* Return the number of times character C occurs in string S. */
2371 static int
2372 n_occurrences (int c, const char *s)
2374 int n = 0;
2375 while (*s)
2376 n += (*s++ == c);
2377 return n;
2380 /* A subroutine of expand_asm_operands. Check that all operands have
2381 the same number of alternatives. Return true if so. */
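/* For instance (hypothetical asm), in
     asm ("foo %0,%1" : "=r,m" (x) : "r,m" (y));
   each constraint string contains one comma, so every operand
   agrees on two alternatives and the check succeeds. */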
2383 static bool
2384 check_operand_nalternatives (tree outputs, tree inputs)
2386 if (outputs || inputs)
2388 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2389 int nalternatives
2390 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2391 tree next = inputs;
2393 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2395 error ("too many alternatives in %<asm%>");
2396 return false;
2399 tmp = outputs;
2400 while (tmp)
2402 const char *constraint
2403 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2405 if (n_occurrences (',', constraint) != nalternatives)
2407 error ("operand constraints for %<asm%> differ "
2408 "in number of alternatives");
2409 return false;
2412 if (TREE_CHAIN (tmp))
2413 tmp = TREE_CHAIN (tmp);
2414 else
2415 tmp = next, next = 0;
2419 return true;
2422 /* Check for overlap between registers marked in CLOBBERED_REGS and
2423 anything inappropriate in T. Emit an error and return true
2424 if an overlap is found, false otherwise. */
2426 static bool
2427 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2429 /* Conflicts between asm-declared register variables and the clobber
2430 list are not allowed. */
2431 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2433 if (overlap)
2435 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2436 DECL_NAME (overlap));
2438 /* Reset registerness to stop multiple errors emitted for a single
2439 variable. */
2440 DECL_REGISTER (overlap) = 0;
2441 return true;
2444 return false;
2447 /* Generate RTL for an asm statement with arguments.
2448 STRING is the instruction template.
2449 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2450 Each output or input has an expression in the TREE_VALUE and
2451 a tree list in TREE_PURPOSE which in turn contains a constraint
2452 name in TREE_VALUE (or NULL_TREE) and a constraint string
2453 in TREE_PURPOSE.
2454 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2455 that is clobbered by this insn.
2457 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2458 should be the fallthru basic block of the asm goto.
2460 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2461 Some elements of OUTPUTS may be replaced with trees representing temporary
2462 values. The caller should copy those temporary values to the originally
2463 specified lvalues.
2465 VOL nonzero means the insn is volatile; don't optimize it. */
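/* As a concrete (hypothetical) illustration of the layout described
   above: for
     asm volatile ("add %1,%0" : "=r" (x) : "r" (y) : "cc");
   OUTPUTS holds one element whose TREE_VALUE is the lvalue `x' and
   whose TREE_PURPOSE carries the constraint string "=r", INPUTS
   holds `y' with constraint "r", and CLOBBERS holds the STRING_CST
   "cc". */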
2467 static void
2468 expand_asm_operands (tree string, tree outputs, tree inputs,
2469 tree clobbers, tree labels, basic_block fallthru_bb,
2470 int vol, location_t locus)
2472 rtvec argvec, constraintvec, labelvec;
2473 rtx body;
2474 int ninputs = list_length (inputs);
2475 int noutputs = list_length (outputs);
2476 int nlabels = list_length (labels);
2477 int ninout;
2478 int nclobbers;
2479 HARD_REG_SET clobbered_regs;
2480 int clobber_conflict_found = 0;
2481 tree tail;
2482 tree t;
2483 int i;
2484 /* Vector of RTX's of evaluated output operands. */
2485 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2486 int *inout_opnum = XALLOCAVEC (int, noutputs);
2487 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2488 machine_mode *inout_mode = XALLOCAVEC (machine_mode, noutputs);
2489 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2490 int old_generating_concat_p = generating_concat_p;
2491 rtx_code_label *fallthru_label = NULL;
2493 /* An ASM with no outputs needs to be treated as volatile, for now. */
2494 if (noutputs == 0)
2495 vol = 1;
2497 if (! check_operand_nalternatives (outputs, inputs))
2498 return;
2500 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2502 /* Collect constraints. */
2503 i = 0;
2504 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2505 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2506 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2507 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2509 /* Sometimes we wish to automatically clobber registers across an asm.
2510 Case in point is when the i386 backend moved from cc0 to a hard reg --
2511 maintaining source-level compatibility means automatically clobbering
2512 the flags register. */
2513 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2515 /* Count the number of meaningful clobbered registers, ignoring what
2516 we would ignore later. */
2517 nclobbers = 0;
2518 CLEAR_HARD_REG_SET (clobbered_regs);
2519 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2521 const char *regname;
2522 int nregs;
2524 if (TREE_VALUE (tail) == error_mark_node)
2525 return;
2526 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2528 i = decode_reg_name_and_count (regname, &nregs);
2529 if (i == -4)
2530 ++nclobbers;
2531 else if (i == -2)
2532 error ("unknown register name %qs in %<asm%>", regname);
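/* Note on the codes tested here and in the clobber emission loop
   further down: a nonnegative result is a hard register number,
   -2 flags an unknown register name, -3 is `cc' (not a register),
   and -4 is `memory' (counted as a clobber but not marked in
   CLOBBERED_REGS). */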
2534 /* Mark clobbered registers. */
2535 if (i >= 0)
2537 int reg;
2539 for (reg = i; reg < i + nregs; reg++)
2541 ++nclobbers;
2543 /* Clobbering the PIC register is an error. */
2544 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2546 error ("PIC register clobbered by %qs in %<asm%>", regname);
2547 return;
2550 SET_HARD_REG_BIT (clobbered_regs, reg);
2555 /* First pass over inputs and outputs checks validity and sets
2556 mark_addressable if needed. */
2558 ninout = 0;
2559 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2561 tree val = TREE_VALUE (tail);
2562 tree type = TREE_TYPE (val);
2563 const char *constraint;
2564 bool is_inout;
2565 bool allows_reg;
2566 bool allows_mem;
2568 /* If there's an erroneous arg, emit no insn. */
2569 if (type == error_mark_node)
2570 return;
2572 /* Try to parse the output constraint. If that fails, there's
2573 no point in going further. */
2574 constraint = constraints[i];
2575 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2576 &allows_mem, &allows_reg, &is_inout))
2577 return;
2579 if (! allows_reg
2580 && (allows_mem
2581 || is_inout
2582 || (DECL_P (val)
2583 && REG_P (DECL_RTL (val))
2584 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2585 mark_addressable (val);
2587 if (is_inout)
2588 ninout++;
2591 ninputs += ninout;
2592 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
2594 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2595 return;
2598 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2600 bool allows_reg, allows_mem;
2601 const char *constraint;
2603 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2604 would get VOIDmode and that could cause a crash in reload. */
2605 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2606 return;
2608 constraint = constraints[i + noutputs];
2609 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2610 constraints, &allows_mem, &allows_reg))
2611 return;
2613 if (! allows_reg && allows_mem)
2614 mark_addressable (TREE_VALUE (tail));
2617 /* Second pass evaluates arguments. */
2619 /* Make sure stack is consistent for asm goto. */
2620 if (nlabels > 0)
2621 do_pending_stack_adjust ();
2623 ninout = 0;
2624 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2626 tree val = TREE_VALUE (tail);
2627 tree type = TREE_TYPE (val);
2628 bool is_inout;
2629 bool allows_reg;
2630 bool allows_mem;
2631 rtx op;
2632 bool ok;
2634 ok = parse_output_constraint (&constraints[i], i, ninputs,
2635 noutputs, &allows_mem, &allows_reg,
2636 &is_inout);
2637 gcc_assert (ok);
2639 /* If an output operand is not a decl or indirect ref and our constraint
2640 allows a register, make a temporary to act as an intermediate.
2641 Make the asm insn write into that, then our caller will copy it to
2642 the real output operand. Likewise for promoted variables. */
2644 generating_concat_p = 0;
2646 real_output_rtx[i] = NULL_RTX;
2647 if ((TREE_CODE (val) == INDIRECT_REF
2648 && allows_mem)
2649 || (DECL_P (val)
2650 && (allows_mem || REG_P (DECL_RTL (val)))
2651 && ! (REG_P (DECL_RTL (val))
2652 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2653 || ! allows_reg
2654 || is_inout)
2656 op = expand_expr (val, NULL_RTX, VOIDmode,
2657 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2658 if (MEM_P (op))
2659 op = validize_mem (op);
2661 if (! allows_reg && !MEM_P (op))
2662 error ("output number %d not directly addressable", i);
2663 if ((! allows_mem && MEM_P (op))
2664 || GET_CODE (op) == CONCAT)
2666 real_output_rtx[i] = op;
2667 op = gen_reg_rtx (GET_MODE (op));
2668 if (is_inout)
2669 emit_move_insn (op, real_output_rtx[i]);
2672 else
2674 op = assign_temp (type, 0, 1);
2675 op = validize_mem (op);
2676 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2677 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2678 TREE_VALUE (tail) = make_tree (type, op);
2680 output_rtx[i] = op;
2682 generating_concat_p = old_generating_concat_p;
2684 if (is_inout)
2686 inout_mode[ninout] = TYPE_MODE (type);
2687 inout_opnum[ninout++] = i;
2690 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2691 clobber_conflict_found = 1;
2694 /* Make vectors for the expression-rtx, constraint strings,
2695 and named operands. */
2697 argvec = rtvec_alloc (ninputs);
2698 constraintvec = rtvec_alloc (ninputs);
2699 labelvec = rtvec_alloc (nlabels);
2701 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2702 : GET_MODE (output_rtx[0])),
2703 ggc_strdup (TREE_STRING_POINTER (string)),
2704 empty_string, 0, argvec, constraintvec,
2705 labelvec, locus);
2707 MEM_VOLATILE_P (body) = vol;
2709 /* Eval the inputs and put them into ARGVEC.
2710 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2712 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2714 bool allows_reg, allows_mem;
2715 const char *constraint;
2716 tree val, type;
2717 rtx op;
2718 bool ok;
2720 constraint = constraints[i + noutputs];
2721 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2722 constraints, &allows_mem, &allows_reg);
2723 gcc_assert (ok);
2725 generating_concat_p = 0;
2727 val = TREE_VALUE (tail);
2728 type = TREE_TYPE (val);
2729 /* EXPAND_INITIALIZER will not generate code for valid initializer
2730 constants, but will still generate code for other types of operand.
2731 This is the behavior we want for constant constraints. */
2732 op = expand_expr (val, NULL_RTX, VOIDmode,
2733 allows_reg ? EXPAND_NORMAL
2734 : allows_mem ? EXPAND_MEMORY
2735 : EXPAND_INITIALIZER);
2737 /* Never pass a CONCAT to an ASM. */
2738 if (GET_CODE (op) == CONCAT)
2739 op = force_reg (GET_MODE (op), op);
2740 else if (MEM_P (op))
2741 op = validize_mem (op);
2743 if (asm_operand_ok (op, constraint, NULL) <= 0)
2745 if (allows_reg && TYPE_MODE (type) != BLKmode)
2746 op = force_reg (TYPE_MODE (type), op);
2747 else if (!allows_mem)
2748 warning (0, "asm operand %d probably doesn%'t match constraints",
2749 i + noutputs);
2750 else if (MEM_P (op))
2752 /* We won't recognize either volatile memory or memory
2753 with a queued address as a valid memory_operand
2754 at this point. Ignore it: clearly this *is* a memory. */
2756 else
2757 gcc_unreachable ();
2760 generating_concat_p = old_generating_concat_p;
2761 ASM_OPERANDS_INPUT (body, i) = op;
2763 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2764 = gen_rtx_ASM_INPUT_loc (TYPE_MODE (type),
2765 ggc_strdup (constraints[i + noutputs]),
2766 locus);
2768 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2769 clobber_conflict_found = 1;
2772 /* Protect all the operands from the queue now that they have all been
2773 evaluated. */
2775 generating_concat_p = 0;
2777 /* For in-out operands, copy output rtx to input rtx. */
2778 for (i = 0; i < ninout; i++)
2780 int j = inout_opnum[i];
2781 char buffer[16];
2783 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2784 = output_rtx[j];
2786 sprintf (buffer, "%d", j);
2787 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2788 = gen_rtx_ASM_INPUT_loc (inout_mode[i], ggc_strdup (buffer), locus);
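/* E.g. (hypothetical in-out operand): for
     asm ("incl %0" : "+r" (x));
   operand 0 is recorded as an output and also appears as an extra
   input whose constraint is the string "0" built by the sprintf
   above, which ties that input to output operand 0. */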
2791 /* Copy labels to the vector. */
2792 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2794 rtx r;
2795 /* If asm goto has any labels in the fallthru basic block, use
2796 a label that we emit immediately after the asm goto. Expansion
2797 may insert further instructions into the same basic block after
2798 asm goto and if we don't do this, insertion of instructions on
2799 the fallthru edge might misbehave. See PR58670. */
2800 if (fallthru_bb
2801 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2803 if (fallthru_label == NULL_RTX)
2804 fallthru_label = gen_label_rtx ();
2805 r = fallthru_label;
2807 else
2808 r = label_rtx (TREE_VALUE (tail));
2809 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2812 generating_concat_p = old_generating_concat_p;
2814 /* Now, for each output, construct an rtx
2815 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2816 ARGVEC CONSTRAINTS OPNAMES))
2817 If there is more than one, put them inside a PARALLEL. */
2819 if (nlabels > 0 && nclobbers == 0)
2821 gcc_assert (noutputs == 0);
2822 emit_jump_insn (body);
2824 else if (noutputs == 0 && nclobbers == 0)
2826 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2827 emit_insn (body);
2829 else if (noutputs == 1 && nclobbers == 0)
2831 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2832 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2834 else
2836 rtx obody = body;
2837 int num = noutputs;
2839 if (num == 0)
2840 num = 1;
2842 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2844 /* For each output operand, store a SET. */
2845 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2847 XVECEXP (body, 0, i)
2848 = gen_rtx_SET (VOIDmode,
2849 output_rtx[i],
2850 gen_rtx_ASM_OPERANDS
2851 (GET_MODE (output_rtx[i]),
2852 ggc_strdup (TREE_STRING_POINTER (string)),
2853 ggc_strdup (constraints[i]),
2854 i, argvec, constraintvec, labelvec, locus));
2856 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2859 /* If there are no outputs (but there are some clobbers)
2860 store the bare ASM_OPERANDS into the PARALLEL. */
2862 if (i == 0)
2863 XVECEXP (body, 0, i++) = obody;
2865 /* Store (clobber REG) for each clobbered register specified. */
2867 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2869 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2870 int reg, nregs;
2871 int j = decode_reg_name_and_count (regname, &nregs);
2872 rtx clobbered_reg;
2874 if (j < 0)
2876 if (j == -3) /* `cc', which is not a register */
2877 continue;
2879 if (j == -4) /* `memory', don't cache memory across asm */
2881 XVECEXP (body, 0, i++)
2882 = gen_rtx_CLOBBER (VOIDmode,
2883 gen_rtx_MEM
2884 (BLKmode,
2885 gen_rtx_SCRATCH (VOIDmode)));
2886 continue;
2889 /* Ignore unknown register, error already signaled. */
2890 continue;
2893 for (reg = j; reg < j + nregs; reg++)
2895 /* Use QImode since that's guaranteed to clobber just
2896 one reg. */
2897 clobbered_reg = gen_rtx_REG (QImode, reg);
2899 /* Do sanity check for overlap between clobbers and
2900 respectively input and outputs that hasn't been
2901 handled. Such overlap should have been detected and
2902 reported above. */
2903 if (!clobber_conflict_found)
2905 int opno;
2907 /* We test the old body (obody) contents to avoid
2908 tripping over the under-construction body. */
2909 for (opno = 0; opno < noutputs; opno++)
2910 if (reg_overlap_mentioned_p (clobbered_reg,
2911 output_rtx[opno]))
2912 internal_error
2913 ("asm clobber conflict with output operand");
2915 for (opno = 0; opno < ninputs - ninout; opno++)
2916 if (reg_overlap_mentioned_p (clobbered_reg,
2917 ASM_OPERANDS_INPUT (obody,
2918 opno)))
2919 internal_error
2920 ("asm clobber conflict with input operand");
2923 XVECEXP (body, 0, i++)
2924 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2928 if (nlabels > 0)
2929 emit_jump_insn (body);
2930 else
2931 emit_insn (body);
2934 if (fallthru_label)
2935 emit_label (fallthru_label);
2937 /* For any outputs that needed reloading into registers, spill them
2938 back to where they belong. */
2939 for (i = 0; i < noutputs; ++i)
2940 if (real_output_rtx[i])
2941 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2943 crtl->has_asm_statement = 1;
2944 free_temp_slots ();
2948 static void
2949 expand_asm_stmt (gimple stmt)
2951 int noutputs;
2952 tree outputs, tail, t;
2953 tree *o;
2954 size_t i, n;
2955 const char *s;
2956 tree str, out, in, cl, labels;
2957 location_t locus = gimple_location (stmt);
2958 basic_block fallthru_bb = NULL;
2960 /* Meh... convert the gimple asm operands into real tree lists.
2961 Eventually we should make all routines work on the vectors instead
2962 of relying on TREE_CHAIN. */
2963 out = NULL_TREE;
2964 n = gimple_asm_noutputs (stmt);
2965 if (n > 0)
2967 t = out = gimple_asm_output_op (stmt, 0);
2968 for (i = 1; i < n; i++)
2969 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
2972 in = NULL_TREE;
2973 n = gimple_asm_ninputs (stmt);
2974 if (n > 0)
2976 t = in = gimple_asm_input_op (stmt, 0);
2977 for (i = 1; i < n; i++)
2978 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
2981 cl = NULL_TREE;
2982 n = gimple_asm_nclobbers (stmt);
2983 if (n > 0)
2985 t = cl = gimple_asm_clobber_op (stmt, 0);
2986 for (i = 1; i < n; i++)
2987 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
2990 labels = NULL_TREE;
2991 n = gimple_asm_nlabels (stmt);
2992 if (n > 0)
2994 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
2995 if (fallthru)
2996 fallthru_bb = fallthru->dest;
2997 t = labels = gimple_asm_label_op (stmt, 0);
2998 for (i = 1; i < n; i++)
2999 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
3002 s = gimple_asm_string (stmt);
3003 str = build_string (strlen (s), s);
3005 if (gimple_asm_input_p (stmt))
3007 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
3008 return;
3011 outputs = out;
3012 noutputs = gimple_asm_noutputs (stmt);
3013 /* o[I] is the place that output number I should be written. */
3014 o = (tree *) alloca (noutputs * sizeof (tree));
3016 /* Record the contents of OUTPUTS before it is modified. */
3017 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3018 o[i] = TREE_VALUE (tail);
3020 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
3021 OUTPUTS some trees for where the values were actually stored. */
3022 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
3023 gimple_asm_volatile_p (stmt), locus);
3025 /* Copy all the intermediate outputs into the specified outputs. */
3026 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3028 if (o[i] != TREE_VALUE (tail))
3030 expand_assignment (o[i], TREE_VALUE (tail), false);
3031 free_temp_slots ();
3033 /* Restore the original value so that it's correct the next
3034 time we expand this function. */
3035 TREE_VALUE (tail) = o[i];
3040 /* Emit code to jump to the address
3041 specified by the pointer expression EXP. */
3043 static void
3044 expand_computed_goto (tree exp)
3046 rtx x = expand_normal (exp);
3048 x = convert_memory_address (Pmode, x);
3050 do_pending_stack_adjust ();
3051 emit_indirect_jump (x);
3054 /* Generate RTL code for a `goto' statement with target label LABEL.
3055 LABEL should be a LABEL_DECL tree node that was or will later be
3056 defined with `expand_label'. */
3058 static void
3059 expand_goto (tree label)
3061 #ifdef ENABLE_CHECKING
3062 /* Check for a nonlocal goto to a containing function. Should have
3063 gotten translated to __builtin_nonlocal_goto. */
3064 tree context = decl_function_context (label);
3065 gcc_assert (!context || context == current_function_decl);
3066 #endif
3068 emit_jump (label_rtx (label));
3071 /* Output a return with no value. */
3073 static void
3074 expand_null_return_1 (void)
3076 clear_pending_stack_adjust ();
3077 do_pending_stack_adjust ();
3078 emit_jump (return_label);
3081 /* Generate RTL to return from the current function, with no value.
3082 (That is, we do not do anything about returning any value.) */
3084 void
3085 expand_null_return (void)
3087 /* If this function was declared to return a value, but we
3088 didn't, clobber the return registers so that they are not
3089 propagated live to the rest of the function. */
3090 clobber_return_register ();
3092 expand_null_return_1 ();
3095 /* Generate RTL to return from the current function, with value VAL. */
3097 static void
3098 expand_value_return (rtx val)
3100 /* Copy the value to the return location unless it's already there. */
3102 tree decl = DECL_RESULT (current_function_decl);
3103 rtx return_reg = DECL_RTL (decl);
3104 if (return_reg != val)
3106 tree funtype = TREE_TYPE (current_function_decl);
3107 tree type = TREE_TYPE (decl);
3108 int unsignedp = TYPE_UNSIGNED (type);
3109 machine_mode old_mode = DECL_MODE (decl);
3110 machine_mode mode;
3111 if (DECL_BY_REFERENCE (decl))
3112 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3113 else
3114 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3116 if (mode != old_mode)
3117 val = convert_modes (mode, old_mode, val, unsignedp);
3119 if (GET_CODE (return_reg) == PARALLEL)
3120 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3121 else
3122 emit_move_insn (return_reg, val);
3125 expand_null_return_1 ();
3128 /* Generate RTL to evaluate the expression RETVAL and return it
3129 from the current function. */
3131 static void
3132 expand_return (tree retval, tree bounds)
3134 rtx result_rtl;
3135 rtx val = 0;
3136 tree retval_rhs;
3137 rtx bounds_rtl;
3139 /* If function wants no value, give it none. */
3140 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3142 expand_normal (retval);
3143 expand_null_return ();
3144 return;
3147 if (retval == error_mark_node)
3149 /* Treat this like a return of no value from a function that
3150 returns a value. */
3151 expand_null_return ();
3152 return;
3154 else if ((TREE_CODE (retval) == MODIFY_EXPR
3155 || TREE_CODE (retval) == INIT_EXPR)
3156 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3157 retval_rhs = TREE_OPERAND (retval, 1);
3158 else
3159 retval_rhs = retval;
3161 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3163 /* Put returned bounds to the right place. */
3164 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3165 if (bounds_rtl)
3167 rtx addr, bnd;
3169 if (bounds)
3171 bnd = expand_normal (bounds);
3172 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3174 else if (REG_P (bounds_rtl))
3176 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3177 addr = gen_rtx_MEM (Pmode, addr);
3178 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3179 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3181 else
3183 int n;
3185 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3187 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3188 addr = gen_rtx_MEM (Pmode, addr);
3190 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3192 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3193 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3194 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3195 rtx bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3196 targetm.calls.store_returned_bounds (slot, bnd);
3200 else if (chkp_function_instrumented_p (current_function_decl)
3201 && !BOUNDED_P (retval_rhs)
3202 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3203 && TREE_CODE (retval_rhs) != RESULT_DECL)
3205 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3206 addr = gen_rtx_MEM (Pmode, addr);
3208 gcc_assert (MEM_P (result_rtl));
3210 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3213 /* If we are returning the RESULT_DECL, then the value has already
3214 been stored into it, so we don't have to do anything special. */
3215 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3216 expand_value_return (result_rtl);
3218 /* If the result is an aggregate that is being returned in one (or more)
3219 registers, load the registers here. */
3221 else if (retval_rhs != 0
3222 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3223 && REG_P (result_rtl))
3225 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3226 if (val)
3228 /* Use the mode of the result value on the return register. */
3229 PUT_MODE (result_rtl, GET_MODE (val));
3230 expand_value_return (val);
3232 else
3233 expand_null_return ();
3235 else if (retval_rhs != 0
3236 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3237 && (REG_P (result_rtl)
3238 || (GET_CODE (result_rtl) == PARALLEL)))
3240 /* Compute the return value into a temporary (usually a pseudo reg). */
3241 val
3242 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3243 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3244 val = force_not_mem (val);
3245 expand_value_return (val);
3247 else
3249 /* No hard reg used; calculate value into hard return reg. */
3250 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3251 expand_value_return (result_rtl);
3255 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3256 STMT that doesn't require special handling for outgoing edges. That
3257 is no tailcalls and no GIMPLE_COND. */
3259 static void
3260 expand_gimple_stmt_1 (gimple stmt)
3262 tree op0;
3264 set_curr_insn_location (gimple_location (stmt));
3266 switch (gimple_code (stmt))
3268 case GIMPLE_GOTO:
3269 op0 = gimple_goto_dest (stmt);
3270 if (TREE_CODE (op0) == LABEL_DECL)
3271 expand_goto (op0);
3272 else
3273 expand_computed_goto (op0);
3274 break;
3275 case GIMPLE_LABEL:
3276 expand_label (gimple_label_label (stmt));
3277 break;
3278 case GIMPLE_NOP:
3279 case GIMPLE_PREDICT:
3280 break;
3281 case GIMPLE_SWITCH:
3282 expand_case (stmt);
3283 break;
3284 case GIMPLE_ASM:
3285 expand_asm_stmt (stmt);
3286 break;
3287 case GIMPLE_CALL:
3288 expand_call_stmt (stmt);
3289 break;
3291 case GIMPLE_RETURN:
3292 op0 = gimple_return_retval (stmt);
3294 if (op0 && op0 != error_mark_node)
3296 tree result = DECL_RESULT (current_function_decl);
3298 /* If we are not returning the current function's RESULT_DECL,
3299 build an assignment to it. */
3300 if (op0 != result)
3302 /* I believe that a function's RESULT_DECL is unique. */
3303 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3305 /* ??? We'd like to use simply expand_assignment here,
3306 but this fails if the value is of BLKmode but the return
3307 decl is a register. expand_return has special handling
3308 for this combination, which eventually should move
3309 to common code. See comments there. Until then, let's
3310 build a modify expression :-/ */
3311 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3312 result, op0);
3315 if (!op0)
3316 expand_null_return ();
3317 else
3318 expand_return (op0, gimple_return_retbnd (stmt));
3319 break;
3321 case GIMPLE_ASSIGN:
3323 tree lhs = gimple_assign_lhs (stmt);
3325 /* Tree expand used to fiddle with |= and &= of two bitfield
3326 COMPONENT_REFs here. This can't happen with gimple, the LHS
3327 of binary assigns must be a gimple reg. */
3329 if (TREE_CODE (lhs) != SSA_NAME
3330 || get_gimple_rhs_class (gimple_expr_code (stmt))
3331 == GIMPLE_SINGLE_RHS)
3333 tree rhs = gimple_assign_rhs1 (stmt);
3334 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3335 == GIMPLE_SINGLE_RHS);
3336 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3337 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3338 if (TREE_CLOBBER_P (rhs))
3339 /* This is a clobber to mark the going out of scope for
3340 this LHS. */
3341 ;
3342 else
3343 expand_assignment (lhs, rhs,
3344 gimple_assign_nontemporal_move_p (stmt));
3346 else
3348 rtx target, temp;
3349 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
3350 struct separate_ops ops;
3351 bool promoted = false;
3353 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3354 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3355 promoted = true;
3357 ops.code = gimple_assign_rhs_code (stmt);
3358 ops.type = TREE_TYPE (lhs);
3359 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3361 case GIMPLE_TERNARY_RHS:
3362 ops.op2 = gimple_assign_rhs3 (stmt);
3363 /* Fallthru */
3364 case GIMPLE_BINARY_RHS:
3365 ops.op1 = gimple_assign_rhs2 (stmt);
3366 /* Fallthru */
3367 case GIMPLE_UNARY_RHS:
3368 ops.op0 = gimple_assign_rhs1 (stmt);
3369 break;
3370 default:
3371 gcc_unreachable ();
3373 ops.location = gimple_location (stmt);
3375 /* If we want to use a nontemporal store, force the value to
3376 register first. If we store into a promoted register,
3377 don't directly expand to target. */
3378 temp = nontemporal || promoted ? NULL_RTX : target;
3379 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3380 EXPAND_NORMAL);
3382 if (temp == target)
3383 ;
3384 else if (promoted)
3386 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3387 /* If TEMP is a VOIDmode constant, use convert_modes to make
3388 sure that we properly convert it. */
3389 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3391 temp = convert_modes (GET_MODE (target),
3392 TYPE_MODE (ops.type),
3393 temp, unsignedp);
3394 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3395 GET_MODE (target), temp, unsignedp);
3398 convert_move (SUBREG_REG (target), temp, unsignedp);
3400 else if (nontemporal && emit_storent_insn (target, temp))
3401 ;
3402 else
3404 temp = force_operand (temp, target);
3405 if (temp != target)
3406 emit_move_insn (target, temp);
3410 break;
3412 default:
3413 gcc_unreachable ();
3417 /* Expand one gimple statement STMT and return the last RTL instruction
3418 before any of the newly generated ones.
3420 In addition to generating the necessary RTL instructions this also
3421 sets REG_EH_REGION notes if necessary and sets the current source
3422 location for diagnostics. */
3424 static rtx_insn *
3425 expand_gimple_stmt (gimple stmt)
3427 location_t saved_location = input_location;
3428 rtx_insn *last = get_last_insn ();
3429 int lp_nr;
3431 gcc_assert (cfun);
3433 /* We need to save and restore the current source location so that errors
3434 discovered during expansion are emitted with the right location. But
3435 it would be better if the diagnostic routines used the source location
3436 embedded in the tree nodes rather than globals. */
3437 if (gimple_has_location (stmt))
3438 input_location = gimple_location (stmt);
3440 expand_gimple_stmt_1 (stmt);
3442 /* Free any temporaries used to evaluate this statement. */
3443 free_temp_slots ();
3445 input_location = saved_location;
3447 /* Mark all insns that may trap. */
3448 lp_nr = lookup_stmt_eh_lp (stmt);
3449 if (lp_nr)
3451 rtx_insn *insn;
3452 for (insn = next_real_insn (last); insn;
3453 insn = next_real_insn (insn))
3455 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3456 /* If we want exceptions for non-call insns, any
3457 may_trap_p instruction may throw. */
3458 && GET_CODE (PATTERN (insn)) != CLOBBER
3459 && GET_CODE (PATTERN (insn)) != USE
3460 && insn_could_throw_p (insn))
3461 make_reg_eh_region_note (insn, 0, lp_nr);
3465 return last;
3468 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3469 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3470 generated a tail call (something that might be denied by the ABI
3471 rules governing the call; see calls.c).
3473 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3474 can still reach the rest of BB. The case here is __builtin_sqrt,
3475 where the NaN result goes through the external function (with a
3476 tailcall) and the normal result happens via a sqrt instruction. */
3478 static basic_block
3479 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
3481 rtx_insn *last2, *last;
3482 edge e;
3483 edge_iterator ei;
3484 int probability;
3485 gcov_type count;
3487 last2 = last = expand_gimple_stmt (stmt);
3489 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3490 if (CALL_P (last) && SIBLING_CALL_P (last))
3491 goto found;
3493 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3495 *can_fallthru = true;
3496 return NULL;
3498 found:
3499 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3500 Any instructions emitted here are about to be deleted. */
3501 do_pending_stack_adjust ();
3503 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3504 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3505 EH or abnormal edges, we shouldn't have created a tail call in
3506 the first place. So it seems to me we should just be removing
3507 all edges here, or redirecting the existing fallthru edge to
3508 the exit block. */
3510 probability = 0;
3511 count = 0;
3513 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3515 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3517 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3519 e->dest->count -= e->count;
3520 e->dest->frequency -= EDGE_FREQUENCY (e);
3521 if (e->dest->count < 0)
3522 e->dest->count = 0;
3523 if (e->dest->frequency < 0)
3524 e->dest->frequency = 0;
3526 count += e->count;
3527 probability += e->probability;
3528 remove_edge (e);
3530 else
3531 ei_next (&ei);
3534 /* This is somewhat ugly: the call_expr expander often emits instructions
3535 after the sibcall (to perform the function return). These confuse the
3536 find_many_sub_basic_blocks code, so we need to get rid of these. */
3537 last = NEXT_INSN (last);
3538 gcc_assert (BARRIER_P (last));
3540 *can_fallthru = false;
3541 while (NEXT_INSN (last))
3543 /* For instance an sqrt builtin expander expands an if with a
3544 sibcall in the then arm and a label for the `else` arm. */
3545 if (LABEL_P (NEXT_INSN (last)))
3547 *can_fallthru = true;
3548 break;
3550 delete_insn (NEXT_INSN (last));
3553 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3554 | EDGE_SIBCALL);
3555 e->probability += probability;
3556 e->count += count;
3557 BB_END (bb) = last;
3558 update_bb_for_insn (bb);
3560 if (NEXT_INSN (last))
3562 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3564 last = BB_END (bb);
3565 if (BARRIER_P (last))
3566 BB_END (bb) = PREV_INSN (last);
3569 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3571 return bb;
3574 /* Return the difference between the floor and the truncated result of
3575 a signed division by OP1 with remainder MOD. */
3576 static rtx
3577 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3579 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3580 return gen_rtx_IF_THEN_ELSE
3581 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3582 gen_rtx_IF_THEN_ELSE
3583 (mode, gen_rtx_LT (BImode,
3584 gen_rtx_DIV (mode, op1, mod),
3585 const0_rtx),
3586 constm1_rtx, const0_rtx),
3587 const0_rtx);
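/* Illustrative worked example: for -7 / 2 the truncated quotient is
   -3 with remainder mod = -1; op1 / mod = 2 / -1 is negative, so
   the expression above yields -1 and the floor result is -4. */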
3590 /* Return the difference between the ceil and the truncated result of
3591 a signed division by OP1 with remainder MOD. */
3592 static rtx
3593 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3595 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3596 return gen_rtx_IF_THEN_ELSE
3597 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3598 gen_rtx_IF_THEN_ELSE
3599 (mode, gen_rtx_GT (BImode,
3600 gen_rtx_DIV (mode, op1, mod),
3601 const0_rtx),
3602 const1_rtx, const0_rtx),
3603 const0_rtx);
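/* Illustrative worked example: for 7 / 2 the truncated quotient is
   3 with remainder mod = 1; op1 / mod = 2 / 1 is positive, so the
   expression above yields +1 and the ceiling result is 4. */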
3606 /* Return the difference between the ceil and the truncated result of
3607 an unsigned division by OP1 with remainder MOD. */
3608 static rtx
3609 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3611 /* (mod != 0 ? 1 : 0) */
3612 return gen_rtx_IF_THEN_ELSE
3613 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3614 const1_rtx, const0_rtx);
3617 /* Return the difference between the rounded and the truncated result
3618 of a signed division by OP1 with remainder MOD. Halfway cases are
3619 rounded away from zero, rather than to the nearest even number. */
3620 static rtx
3621 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3623 /* (abs (mod) >= abs (op1) - abs (mod)
3624 ? (op1 / mod > 0 ? 1 : -1)
3625 : 0) */
3626 return gen_rtx_IF_THEN_ELSE
3627 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3628 gen_rtx_MINUS (mode,
3629 gen_rtx_ABS (mode, op1),
3630 gen_rtx_ABS (mode, mod))),
3631 gen_rtx_IF_THEN_ELSE
3632 (mode, gen_rtx_GT (BImode,
3633 gen_rtx_DIV (mode, op1, mod),
3634 const0_rtx),
3635 const1_rtx, constm1_rtx),
3636 const0_rtx);
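/* Illustrative worked example: for 7 / 2, abs (mod) = 1 equals
   abs (op1) - abs (mod) = 1, so the halfway test holds; op1 / mod
   is positive, so the adjustment is +1 and the rounded result is
   4, i.e. 3.5 rounded away from zero. */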
3639 /* Return the difference between the rounded and the truncated result
3640 of an unsigned division by OP1 with remainder MOD. Halfway cases
3641 are rounded away from zero, rather than to the nearest even
3642 number. */
3643 static rtx
3644 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3646 /* (mod >= op1 - mod ? 1 : 0) */
3647 return gen_rtx_IF_THEN_ELSE
3648 (mode, gen_rtx_GE (BImode, mod,
3649 gen_rtx_MINUS (mode, op1, mod)),
3650 const1_rtx, const0_rtx);
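/* Illustrative worked example: for unsigned 7 / 2, mod = 1 and
   op1 - mod = 1, so the test holds and the truncated quotient 3 is
   adjusted to 4. */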
3653 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
3654 any rtl. */
3656 static rtx
3657 convert_debug_memory_address (machine_mode mode, rtx x,
3658 addr_space_t as)
3660 machine_mode xmode = GET_MODE (x);
3662 #ifndef POINTERS_EXTEND_UNSIGNED
3663 gcc_assert (mode == Pmode
3664 || mode == targetm.addr_space.address_mode (as));
3665 gcc_assert (xmode == mode || xmode == VOIDmode);
3666 #else
3667 rtx temp;
3669 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3671 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3672 return x;
3674 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3675 x = simplify_gen_subreg (mode, x, xmode,
3676 subreg_lowpart_offset
3677 (mode, xmode));
3678 else if (POINTERS_EXTEND_UNSIGNED > 0)
3679 x = gen_rtx_ZERO_EXTEND (mode, x);
3680 else if (!POINTERS_EXTEND_UNSIGNED)
3681 x = gen_rtx_SIGN_EXTEND (mode, x);
3682 else
3684 switch (GET_CODE (x))
3686 case SUBREG:
3687 if ((SUBREG_PROMOTED_VAR_P (x)
3688 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3689 || (GET_CODE (SUBREG_REG (x)) == PLUS
3690 && REG_P (XEXP (SUBREG_REG (x), 0))
3691 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3692 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3693 && GET_MODE (SUBREG_REG (x)) == mode)
3694 return SUBREG_REG (x);
3695 break;
3696 case LABEL_REF:
3697 temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
3698 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3699 return temp;
3700 case SYMBOL_REF:
3701 temp = shallow_copy_rtx (x);
3702 PUT_MODE (temp, mode);
3703 return temp;
3704 case CONST:
3705 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3706 if (temp)
3707 temp = gen_rtx_CONST (mode, temp);
3708 return temp;
3709 case PLUS:
3710 case MINUS:
3711 if (CONST_INT_P (XEXP (x, 1)))
3713 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3714 if (temp)
3715 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3717 break;
3718 default:
3719 break;
3721 /* Don't know how to express ptr_extend as an operation in debug info. */
3722 return NULL;
3724 #endif /* POINTERS_EXTEND_UNSIGNED */
3726 return x;
3729 /* Return an RTX equivalent to the value of the parameter DECL. */
3731 static rtx
3732 expand_debug_parm_decl (tree decl)
3734 rtx incoming = DECL_INCOMING_RTL (decl);
3736 if (incoming
3737 && GET_MODE (incoming) != BLKmode
3738 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3739 || (MEM_P (incoming)
3740 && REG_P (XEXP (incoming, 0))
3741 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3743 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3745 #ifdef HAVE_window_save
3746 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3747 If the target machine has an explicit window save instruction, the
3748 actual entry value is the corresponding OUTGOING_REGNO instead. */
3749 if (REG_P (incoming)
3750 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3751 incoming
3752 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3753 OUTGOING_REGNO (REGNO (incoming)), 0);
3754 else if (MEM_P (incoming))
3756 rtx reg = XEXP (incoming, 0);
3757 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3759 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3760 incoming = replace_equiv_address_nv (incoming, reg);
3762 else
3763 incoming = copy_rtx (incoming);
3765 #endif
3767 ENTRY_VALUE_EXP (rtl) = incoming;
3768 return rtl;
3771 if (incoming
3772 && GET_MODE (incoming) != BLKmode
3773 && !TREE_ADDRESSABLE (decl)
3774 && MEM_P (incoming)
3775 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3776 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3777 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3778 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3779 return copy_rtx (incoming);
3781 return NULL_RTX;
3784 /* Return an RTX equivalent to the value of the tree expression EXP. */
3786 static rtx
3787 expand_debug_expr (tree exp)
3789 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3790 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3791 machine_mode inner_mode = VOIDmode;
3792 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3793 addr_space_t as;
3795 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3797 case tcc_expression:
3798 switch (TREE_CODE (exp))
3800 case COND_EXPR:
3801 case DOT_PROD_EXPR:
3802 case SAD_EXPR:
3803 case WIDEN_MULT_PLUS_EXPR:
3804 case WIDEN_MULT_MINUS_EXPR:
3805 case FMA_EXPR:
3806 goto ternary;
3808 case TRUTH_ANDIF_EXPR:
3809 case TRUTH_ORIF_EXPR:
3810 case TRUTH_AND_EXPR:
3811 case TRUTH_OR_EXPR:
3812 case TRUTH_XOR_EXPR:
3813 goto binary;
3815 case TRUTH_NOT_EXPR:
3816 goto unary;
3818 default:
3819 break;
3821 break;
3823 ternary:
3824 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3825 if (!op2)
3826 return NULL_RTX;
3827 /* Fall through. */
3829 binary:
3830 case tcc_binary:
3831 case tcc_comparison:
3832 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3833 if (!op1)
3834 return NULL_RTX;
3835 /* Fall through. */
3837 unary:
3838 case tcc_unary:
3839 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3840 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3841 if (!op0)
3842 return NULL_RTX;
3843 break;
3845 case tcc_type:
3846 case tcc_statement:
3847 gcc_unreachable ();
3849 case tcc_constant:
3850 case tcc_exceptional:
3851 case tcc_declaration:
3852 case tcc_reference:
3853 case tcc_vl_exp:
3854 break;
3857 switch (TREE_CODE (exp))
3859 case STRING_CST:
3860 if (!lookup_constant_def (exp))
3862 if (strlen (TREE_STRING_POINTER (exp)) + 1
3863 != (size_t) TREE_STRING_LENGTH (exp))
3864 return NULL_RTX;
3865 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3866 op0 = gen_rtx_MEM (BLKmode, op0);
3867 set_mem_attributes (op0, exp, 0);
3868 return op0;
3870 /* Fall through... */
3872 case INTEGER_CST:
3873 case REAL_CST:
3874 case FIXED_CST:
3875 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3876 return op0;
3878 case COMPLEX_CST:
3879 gcc_assert (COMPLEX_MODE_P (mode));
3880 op0 = expand_debug_expr (TREE_REALPART (exp));
3881 op1 = expand_debug_expr (TREE_IMAGPART (exp));
3882 return gen_rtx_CONCAT (mode, op0, op1);
3884 case DEBUG_EXPR_DECL:
3885 op0 = DECL_RTL_IF_SET (exp);
3887 if (op0)
3888 return op0;
3890 op0 = gen_rtx_DEBUG_EXPR (mode);
3891 DEBUG_EXPR_TREE_DECL (op0) = exp;
3892 SET_DECL_RTL (exp, op0);
3894 return op0;
3896 case VAR_DECL:
3897 case PARM_DECL:
3898 case FUNCTION_DECL:
3899 case LABEL_DECL:
3900 case CONST_DECL:
3901 case RESULT_DECL:
3902 op0 = DECL_RTL_IF_SET (exp);
3904 /* This decl was probably optimized away. */
3905 if (!op0)
3907 if (TREE_CODE (exp) != VAR_DECL
3908 || DECL_EXTERNAL (exp)
3909 || !TREE_STATIC (exp)
3910 || !DECL_NAME (exp)
3911 || DECL_HARD_REGISTER (exp)
3912 || DECL_IN_CONSTANT_POOL (exp)
3913 || mode == VOIDmode)
3914 return NULL;
3916 op0 = make_decl_rtl_for_debug (exp);
3917 if (!MEM_P (op0)
3918 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
3919 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
3920 return NULL;
3922 else
3923 op0 = copy_rtx (op0);
3925 if (GET_MODE (op0) == BLKmode
3926 /* If op0 is not BLKmode but MODE is, adjust_mode
3927 below would ICE. While it is likely a FE bug,
3928 try to be robust here. See PR43166. */
3929 || mode == BLKmode
3930 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
3932 gcc_assert (MEM_P (op0));
3933 op0 = adjust_address_nv (op0, mode, 0);
3934 return op0;
3937 /* Fall through. */
3939 adjust_mode:
3940 case PAREN_EXPR:
3941 CASE_CONVERT:
3943 inner_mode = GET_MODE (op0);
3945 if (mode == inner_mode)
3946 return op0;
3948 if (inner_mode == VOIDmode)
3950 if (TREE_CODE (exp) == SSA_NAME)
3951 inner_mode = TYPE_MODE (TREE_TYPE (exp));
3952 else
3953 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3954 if (mode == inner_mode)
3955 return op0;
3958 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3960 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3961 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3962 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3963 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3964 else
3965 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3967 else if (FLOAT_MODE_P (mode))
3969 gcc_assert (TREE_CODE (exp) != SSA_NAME);
3970 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3971 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
3972 else
3973 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
3975 else if (FLOAT_MODE_P (inner_mode))
3977 if (unsignedp)
3978 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3979 else
3980 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3982 else if (CONSTANT_P (op0)
3983 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
3984 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3985 subreg_lowpart_offset (mode,
3986 inner_mode));
3987 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
3988 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
3989 : unsignedp)
3990 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3991 else
3992 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3994 return op0;
3997 case MEM_REF:
3998 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4000 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4001 TREE_OPERAND (exp, 0),
4002 TREE_OPERAND (exp, 1));
4003 if (newexp)
4004 return expand_debug_expr (newexp);
4006 /* FALLTHROUGH */
4007 case INDIRECT_REF:
4008 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4009 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4010 if (!op0)
4011 return NULL;
4013 if (TREE_CODE (exp) == MEM_REF)
4015 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4016 || (GET_CODE (op0) == PLUS
4017 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4018 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4019 Instead just use get_inner_reference. */
4020 goto component_ref;
4022 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4023 if (!op1 || !CONST_INT_P (op1))
4024 return NULL;
4026 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4029 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4031 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4032 op0, as);
4033 if (op0 == NULL_RTX)
4034 return NULL;
4036 op0 = gen_rtx_MEM (mode, op0);
4037 set_mem_attributes (op0, exp, 0);
4038 if (TREE_CODE (exp) == MEM_REF
4039 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4040 set_mem_expr (op0, NULL_TREE);
4041 set_mem_addr_space (op0, as);
4043 return op0;
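      /* Illustrative sketch (not from the source; the pseudo number is
	 made up): a MEM_REF such as MEM[p, 4] whose base P expands to
	 (reg:DI 100) has its constant offset folded by plus_constant
	 above, ending up as
	   (mem:SI (plus:DI (reg:DI 100) (const_int 4)))
	 with the address space and memory attributes set afterwards.  */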
4045 case TARGET_MEM_REF:
4046 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4047 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
4048 return NULL;
4050 op0 = expand_debug_expr
4051 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
4052 if (!op0)
4053 return NULL;
4055 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4056 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4057 op0, as);
4058 if (op0 == NULL_RTX)
4059 return NULL;
4061 op0 = gen_rtx_MEM (mode, op0);
4063 set_mem_attributes (op0, exp, 0);
4064 set_mem_addr_space (op0, as);
4066 return op0;
4068 component_ref:
4069 case ARRAY_REF:
4070 case ARRAY_RANGE_REF:
4071 case COMPONENT_REF:
4072 case BIT_FIELD_REF:
4073 case REALPART_EXPR:
4074 case IMAGPART_EXPR:
4075 case VIEW_CONVERT_EXPR:
4077 machine_mode mode1;
4078 HOST_WIDE_INT bitsize, bitpos;
4079 tree offset;
4080 int volatilep = 0;
4081 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4082 &mode1, &unsignedp, &volatilep, false);
4083 rtx orig_op0;
4085 if (bitsize == 0)
4086 return NULL;
4088 orig_op0 = op0 = expand_debug_expr (tem);
4090 if (!op0)
4091 return NULL;
4093 if (offset)
4095 machine_mode addrmode, offmode;
4097 if (!MEM_P (op0))
4098 return NULL;
4100 op0 = XEXP (op0, 0);
4101 addrmode = GET_MODE (op0);
4102 if (addrmode == VOIDmode)
4103 addrmode = Pmode;
4105 op1 = expand_debug_expr (offset);
4106 if (!op1)
4107 return NULL;
4109 offmode = GET_MODE (op1);
4110 if (offmode == VOIDmode)
4111 offmode = TYPE_MODE (TREE_TYPE (offset));
4113 if (addrmode != offmode)
4114 op1 = simplify_gen_subreg (addrmode, op1, offmode,
4115 subreg_lowpart_offset (addrmode,
4116 offmode));
4118 /* Don't use offset_address here, we don't need a
4119 recognizable address, and we don't want to generate
4120 code. */
4121 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4122 op0, op1));
4125 if (MEM_P (op0))
4127 if (mode1 == VOIDmode)
4128 /* Bitfield. */
4129 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4130 if (bitpos >= BITS_PER_UNIT)
4132 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4133 bitpos %= BITS_PER_UNIT;
4135 else if (bitpos < 0)
4137 HOST_WIDE_INT units
4138 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4139 op0 = adjust_address_nv (op0, mode1, units);
4140 bitpos += units * BITS_PER_UNIT;
4142 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4143 op0 = adjust_address_nv (op0, mode, 0);
4144 else if (GET_MODE (op0) != mode1)
4145 op0 = adjust_address_nv (op0, mode1, 0);
4146 else
4147 op0 = copy_rtx (op0);
4148 if (op0 == orig_op0)
4149 op0 = shallow_copy_rtx (op0);
4150 set_mem_attributes (op0, exp, 0);
4153 if (bitpos == 0 && mode == GET_MODE (op0))
4154 return op0;
4156 if (bitpos < 0)
4157 return NULL;
4159 if (GET_MODE (op0) == BLKmode)
4160 return NULL;
4162 if ((bitpos % BITS_PER_UNIT) == 0
4163 && bitsize == GET_MODE_BITSIZE (mode1))
4165 machine_mode opmode = GET_MODE (op0);
4167 if (opmode == VOIDmode)
4168 opmode = TYPE_MODE (TREE_TYPE (tem));
4170 /* This condition may hold if we're expanding the address
4171 right past the end of an array that turned out not to
4172 be addressable (i.e., the address was only computed in
4173 debug stmts). The gen_subreg below would rightfully
4174 crash, and the address doesn't really exist, so just
4175 drop it. */
4176 if (bitpos >= GET_MODE_BITSIZE (opmode))
4177 return NULL;
4179 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4180 return simplify_gen_subreg (mode, op0, opmode,
4181 bitpos / BITS_PER_UNIT);
4184 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4185 && TYPE_UNSIGNED (TREE_TYPE (exp))
4186 ? SIGN_EXTRACT
4187 : ZERO_EXTRACT, mode,
4188 GET_MODE (op0) != VOIDmode
4189 ? GET_MODE (op0)
4190 : TYPE_MODE (TREE_TYPE (tem)),
4191 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4194 case ABS_EXPR:
4195 return simplify_gen_unary (ABS, mode, op0, mode);
4197 case NEGATE_EXPR:
4198 return simplify_gen_unary (NEG, mode, op0, mode);
4200 case BIT_NOT_EXPR:
4201 return simplify_gen_unary (NOT, mode, op0, mode);
4203 case FLOAT_EXPR:
4204 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4205 0)))
4206 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4207 inner_mode);
4209 case FIX_TRUNC_EXPR:
4210 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4211 inner_mode);
4213 case POINTER_PLUS_EXPR:
4214 /* For the rare target where pointers are not the same size as
4215 size_t, we need to check for mis-matched modes and correct
4216 the addend. */
4217 if (op0 && op1
4218 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4219 && GET_MODE (op0) != GET_MODE (op1))
4221 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4222 /* If OP0 is a partial mode, then we must truncate, even if it has
4223 the same bitsize as OP1 as GCC's representation of partial modes
4224 is opaque. */
4225 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4226 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4227 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4228 GET_MODE (op1));
4229 else
4230 /* We always sign-extend, regardless of the signedness of
4231 the operand, because the operand is always unsigned
4232 here even if the original C expression is signed. */
4233 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4234 GET_MODE (op1));
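	  /* Hedged example of why sign-extension is right here: on a
	     hypothetical target where Pmode is DImode but sizetype is 32
	     bits, the C expression p - 1 arrives as p + (sizetype) -1,
	     i.e. p + 0xffffffff.  Sign-extending the addend to DImode
	     yields (const_int -1), which gives the intended address;
	     zero-extension would wrongly add 0xffffffff.  */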
4236 /* Fall through. */
4237 case PLUS_EXPR:
4238 return simplify_gen_binary (PLUS, mode, op0, op1);
4240 case MINUS_EXPR:
4241 return simplify_gen_binary (MINUS, mode, op0, op1);
4243 case MULT_EXPR:
4244 return simplify_gen_binary (MULT, mode, op0, op1);
4246 case RDIV_EXPR:
4247 case TRUNC_DIV_EXPR:
4248 case EXACT_DIV_EXPR:
4249 if (unsignedp)
4250 return simplify_gen_binary (UDIV, mode, op0, op1);
4251 else
4252 return simplify_gen_binary (DIV, mode, op0, op1);
4254 case TRUNC_MOD_EXPR:
4255 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4257 case FLOOR_DIV_EXPR:
4258 if (unsignedp)
4259 return simplify_gen_binary (UDIV, mode, op0, op1);
4260 else
4262 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4263 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4264 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4265 return simplify_gen_binary (PLUS, mode, div, adj);
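	  /* Worked example (a sketch, assuming floor_sdiv_adjust yields -1
	     when MOD and OP1 have opposite signs and 0 otherwise): for
	     -7 / 2, DIV gives -3 (truncated toward zero) and MOD gives -1;
	     MOD and OP1 differ in sign, so the adjustment is -1 and the
	     result is -3 + -1 = -4 = floor(-3.5).  */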
4268 case FLOOR_MOD_EXPR:
4269 if (unsignedp)
4270 return simplify_gen_binary (UMOD, mode, op0, op1);
4271 else
4273 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4274 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4275 adj = simplify_gen_unary (NEG, mode,
4276 simplify_gen_binary (MULT, mode, adj, op1),
4277 mode);
4278 return simplify_gen_binary (PLUS, mode, mod, adj);
4281 case CEIL_DIV_EXPR:
4282 if (unsignedp)
4284 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4285 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4286 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4287 return simplify_gen_binary (PLUS, mode, div, adj);
4289 else
4291 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4292 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4293 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4294 return simplify_gen_binary (PLUS, mode, div, adj);
4297 case CEIL_MOD_EXPR:
4298 if (unsignedp)
4300 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4301 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4302 adj = simplify_gen_unary (NEG, mode,
4303 simplify_gen_binary (MULT, mode, adj, op1),
4304 mode);
4305 return simplify_gen_binary (PLUS, mode, mod, adj);
4307 else
4309 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4310 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4311 adj = simplify_gen_unary (NEG, mode,
4312 simplify_gen_binary (MULT, mode, adj, op1),
4313 mode);
4314 return simplify_gen_binary (PLUS, mode, mod, adj);
4317 case ROUND_DIV_EXPR:
4318 if (unsignedp)
4320 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4321 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4322 rtx adj = round_udiv_adjust (mode, mod, op1);
4323 return simplify_gen_binary (PLUS, mode, div, adj);
4325 else
4327 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4328 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4329 rtx adj = round_sdiv_adjust (mode, mod, op1);
4330 return simplify_gen_binary (PLUS, mode, div, adj);
4333 case ROUND_MOD_EXPR:
4334 if (unsignedp)
4336 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4337 rtx adj = round_udiv_adjust (mode, mod, op1);
4338 adj = simplify_gen_unary (NEG, mode,
4339 simplify_gen_binary (MULT, mode, adj, op1),
4340 mode);
4341 return simplify_gen_binary (PLUS, mode, mod, adj);
4343 else
4345 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4346 rtx adj = round_sdiv_adjust (mode, mod, op1);
4347 adj = simplify_gen_unary (NEG, mode,
4348 simplify_gen_binary (MULT, mode, adj, op1),
4349 mode);
4350 return simplify_gen_binary (PLUS, mode, mod, adj);
4353 case LSHIFT_EXPR:
4354 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4356 case RSHIFT_EXPR:
4357 if (unsignedp)
4358 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4359 else
4360 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4362 case LROTATE_EXPR:
4363 return simplify_gen_binary (ROTATE, mode, op0, op1);
4365 case RROTATE_EXPR:
4366 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4368 case MIN_EXPR:
4369 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4371 case MAX_EXPR:
4372 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4374 case BIT_AND_EXPR:
4375 case TRUTH_AND_EXPR:
4376 return simplify_gen_binary (AND, mode, op0, op1);
4378 case BIT_IOR_EXPR:
4379 case TRUTH_OR_EXPR:
4380 return simplify_gen_binary (IOR, mode, op0, op1);
4382 case BIT_XOR_EXPR:
4383 case TRUTH_XOR_EXPR:
4384 return simplify_gen_binary (XOR, mode, op0, op1);
4386 case TRUTH_ANDIF_EXPR:
4387 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4389 case TRUTH_ORIF_EXPR:
4390 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4392 case TRUTH_NOT_EXPR:
4393 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4395 case LT_EXPR:
4396 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4397 op0, op1);
4399 case LE_EXPR:
4400 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4401 op0, op1);
4403 case GT_EXPR:
4404 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4405 op0, op1);
4407 case GE_EXPR:
4408 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4409 op0, op1);
4411 case EQ_EXPR:
4412 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4414 case NE_EXPR:
4415 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4417 case UNORDERED_EXPR:
4418 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4420 case ORDERED_EXPR:
4421 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4423 case UNLT_EXPR:
4424 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4426 case UNLE_EXPR:
4427 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4429 case UNGT_EXPR:
4430 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4432 case UNGE_EXPR:
4433 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4435 case UNEQ_EXPR:
4436 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4438 case LTGT_EXPR:
4439 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4441 case COND_EXPR:
4442 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4444 case COMPLEX_EXPR:
4445 gcc_assert (COMPLEX_MODE_P (mode));
4446 if (GET_MODE (op0) == VOIDmode)
4447 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4448 if (GET_MODE (op1) == VOIDmode)
4449 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4450 return gen_rtx_CONCAT (mode, op0, op1);
4452 case CONJ_EXPR:
4453 if (GET_CODE (op0) == CONCAT)
4454 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4455 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4456 XEXP (op0, 1),
4457 GET_MODE_INNER (mode)));
4458 else
4460 machine_mode imode = GET_MODE_INNER (mode);
4461 rtx re, im;
4463 if (MEM_P (op0))
4465 re = adjust_address_nv (op0, imode, 0);
4466 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4468 else
4470 machine_mode ifmode = int_mode_for_mode (mode);
4471 machine_mode ihmode = int_mode_for_mode (imode);
4472 rtx halfsize;
4473 if (ifmode == BLKmode || ihmode == BLKmode)
4474 return NULL;
4475 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4476 re = op0;
4477 if (mode != ifmode)
4478 re = gen_rtx_SUBREG (ifmode, re, 0);
4479 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4480 if (imode != ihmode)
4481 re = gen_rtx_SUBREG (imode, re, 0);
4482 im = copy_rtx (op0);
4483 if (mode != ifmode)
4484 im = gen_rtx_SUBREG (ifmode, im, 0);
4485 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4486 if (imode != ihmode)
4487 im = gen_rtx_SUBREG (imode, im, 0);
4489 im = gen_rtx_NEG (imode, im);
4490 return gen_rtx_CONCAT (mode, re, im);
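	  /* Sketch of the non-MEM path above: for a complex value held in
	     a single pseudo, the real and imaginary halves are carved out
	     with ZERO_EXTRACT at bit offsets 0 and GET_MODE_BITSIZE
	     (ihmode) respectively, and the imaginary half is negated to
	     form the conjugate.  */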
4493 case ADDR_EXPR:
4494 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4495 if (!op0 || !MEM_P (op0))
4497 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4498 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4499 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4500 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4501 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4502 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4504 if (handled_component_p (TREE_OPERAND (exp, 0)))
4506 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4507 tree decl
4508 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4509 &bitoffset, &bitsize, &maxsize);
4510 if ((TREE_CODE (decl) == VAR_DECL
4511 || TREE_CODE (decl) == PARM_DECL
4512 || TREE_CODE (decl) == RESULT_DECL)
4513 && (!TREE_ADDRESSABLE (decl)
4514 || target_for_debug_bind (decl))
4515 && (bitoffset % BITS_PER_UNIT) == 0
4516 && bitsize > 0
4517 && bitsize == maxsize)
4519 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4520 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4524 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4525 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4526 == ADDR_EXPR)
4528 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4529 0));
4530 if (op0 != NULL
4531 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4532 || (GET_CODE (op0) == PLUS
4533 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4534 && CONST_INT_P (XEXP (op0, 1)))))
4536 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4537 1));
4538 if (!op1 || !CONST_INT_P (op1))
4539 return NULL;
4541 return plus_constant (mode, op0, INTVAL (op1));
4545 return NULL;
4548 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4549 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4551 return op0;
4553 case VECTOR_CST:
4555 unsigned i;
4557 op0 = gen_rtx_CONCATN
4558 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4560 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4562 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4563 if (!op1)
4564 return NULL;
4565 XVECEXP (op0, 0, i) = op1;
4568 return op0;
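      /* Sketch of the result (element values made up): a V4SI constant
	 {1, 2, 3, 4} becomes
	   (concatn:V4SI [(const_int 1) (const_int 2)
			  (const_int 3) (const_int 4)])
	 which the var-tracking machinery can pick apart element-wise.  */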
4571 case CONSTRUCTOR:
4572 if (TREE_CLOBBER_P (exp))
4573 return NULL;
4574 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4576 unsigned i;
4577 tree val;
4579 op0 = gen_rtx_CONCATN
4580 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4582 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4584 op1 = expand_debug_expr (val);
4585 if (!op1)
4586 return NULL;
4587 XVECEXP (op0, 0, i) = op1;
4590 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4592 op1 = expand_debug_expr
4593 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4595 if (!op1)
4596 return NULL;
4598 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4599 XVECEXP (op0, 0, i) = op1;
4602 return op0;
4604 else
4605 goto flag_unsupported;
4607 case CALL_EXPR:
4608 /* ??? Maybe handle some builtins? */
4609 return NULL;
4611 case SSA_NAME:
4613 gimple g = get_gimple_for_ssa_name (exp);
4614 if (g)
4616 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
4617 if (!op0)
4618 return NULL;
4620 else
4622 int part = var_to_partition (SA.map, exp);
4624 if (part == NO_PARTITION)
4626 /* If this is a reference to the incoming value of a parameter
4627 that is never used in the code,
4628 use the PARM_DECL's
4629 DECL_RTL if set. */
4630 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4631 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4633 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4634 if (op0)
4635 goto adjust_mode;
4636 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4637 if (op0)
4638 goto adjust_mode;
4640 return NULL;
4643 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4645 op0 = copy_rtx (SA.partition_to_pseudo[part]);
4647 goto adjust_mode;
4650 case ERROR_MARK:
4651 return NULL;
4653 /* Vector stuff. For most of the codes we don't have rtl codes. */
4654 case REALIGN_LOAD_EXPR:
4655 case REDUC_MAX_EXPR:
4656 case REDUC_MIN_EXPR:
4657 case REDUC_PLUS_EXPR:
4658 case VEC_COND_EXPR:
4659 case VEC_PACK_FIX_TRUNC_EXPR:
4660 case VEC_PACK_SAT_EXPR:
4661 case VEC_PACK_TRUNC_EXPR:
4662 case VEC_RSHIFT_EXPR:
4663 case VEC_UNPACK_FLOAT_HI_EXPR:
4664 case VEC_UNPACK_FLOAT_LO_EXPR:
4665 case VEC_UNPACK_HI_EXPR:
4666 case VEC_UNPACK_LO_EXPR:
4667 case VEC_WIDEN_MULT_HI_EXPR:
4668 case VEC_WIDEN_MULT_LO_EXPR:
4669 case VEC_WIDEN_MULT_EVEN_EXPR:
4670 case VEC_WIDEN_MULT_ODD_EXPR:
4671 case VEC_WIDEN_LSHIFT_HI_EXPR:
4672 case VEC_WIDEN_LSHIFT_LO_EXPR:
4673 case VEC_PERM_EXPR:
4674 return NULL;
4676 /* Misc codes. */
4677 case ADDR_SPACE_CONVERT_EXPR:
4678 case FIXED_CONVERT_EXPR:
4679 case OBJ_TYPE_REF:
4680 case WITH_SIZE_EXPR:
4681 return NULL;
4683 case DOT_PROD_EXPR:
4684 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4685 && SCALAR_INT_MODE_P (mode))
4687 op0
4688 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4689 0)))
4690 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4691 inner_mode);
4692 op1
4693 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4694 1)))
4695 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4696 inner_mode);
4697 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4698 return simplify_gen_binary (PLUS, mode, op0, op2);
4700 return NULL;
4702 case WIDEN_MULT_EXPR:
4703 case WIDEN_MULT_PLUS_EXPR:
4704 case WIDEN_MULT_MINUS_EXPR:
4705 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4706 && SCALAR_INT_MODE_P (mode))
4708 inner_mode = GET_MODE (op0);
4709 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4710 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4711 else
4712 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4713 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4714 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4715 else
4716 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4717 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4718 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4719 return op0;
4720 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4721 return simplify_gen_binary (PLUS, mode, op0, op2);
4722 else
4723 return simplify_gen_binary (MINUS, mode, op2, op0);
4725 return NULL;
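      /* Sketch restating the code above: with 16-bit operands and a
	 32-bit result mode, each operand is zero- or sign-extended to
	 SImode according to its type, the product is formed in SImode,
	 and for WIDEN_MULT_PLUS_EXPR the accumulator is added, i.e.
	 op2 + (ext (op0) * ext (op1)).  */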
4727 case MULT_HIGHPART_EXPR:
4728 /* ??? Similar to the above. */
4729 return NULL;
4731 case WIDEN_SUM_EXPR:
4732 case WIDEN_LSHIFT_EXPR:
4733 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4734 && SCALAR_INT_MODE_P (mode))
4736 op0
4737 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4738 0)))
4739 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4740 inner_mode);
4741 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4742 ? ASHIFT : PLUS, mode, op0, op1);
4744 return NULL;
4746 case FMA_EXPR:
4747 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4749 default:
4750 flag_unsupported:
4751 #ifdef ENABLE_CHECKING
4752 debug_tree (exp);
4753 gcc_unreachable ();
4754 #else
4755 return NULL;
4756 #endif
4760 /* Return an RTX equivalent to the source bind value of the tree expression
4761 EXP. */
4763 static rtx
4764 expand_debug_source_expr (tree exp)
4766 rtx op0 = NULL_RTX;
4767 machine_mode mode = VOIDmode, inner_mode;
4769 switch (TREE_CODE (exp))
4771 case PARM_DECL:
4773 mode = DECL_MODE (exp);
4774 op0 = expand_debug_parm_decl (exp);
4775 if (op0)
4776 break;
4777 /* See if this isn't an argument that has been completely
4778 optimized out. */
4779 if (!DECL_RTL_SET_P (exp)
4780 && !DECL_INCOMING_RTL (exp)
4781 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4783 tree aexp = DECL_ORIGIN (exp);
4784 if (DECL_CONTEXT (aexp)
4785 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4787 vec<tree, va_gc> **debug_args;
4788 unsigned int ix;
4789 tree ddecl;
4790 debug_args = decl_debug_args_lookup (current_function_decl);
4791 if (debug_args != NULL)
4793 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
4794 ix += 2)
4795 if (ddecl == aexp)
4796 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4800 break;
4802 default:
4803 break;
4806 if (op0 == NULL_RTX)
4807 return NULL_RTX;
4809 inner_mode = GET_MODE (op0);
4810 if (mode == inner_mode)
4811 return op0;
4813 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4815 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4816 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4817 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4818 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4819 else
4820 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4822 else if (FLOAT_MODE_P (mode))
4823 gcc_unreachable ();
4824 else if (FLOAT_MODE_P (inner_mode))
4826 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4827 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4828 else
4829 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4831 else if (CONSTANT_P (op0)
4832 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4833 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4834 subreg_lowpart_offset (mode, inner_mode));
4835 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4836 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4837 else
4838 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4840 return op0;
4843 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
4844 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4845 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
4847 static void
4848 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
4850 rtx exp = *exp_p;
4852 if (exp == NULL_RTX)
4853 return;
4855 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4856 return;
4858 if (depth == 4)
4860 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4861 rtx dval = make_debug_expr_from_rtl (exp);
4863 /* Emit a debug bind insn before INSN. */
4864 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4865 DEBUG_EXPR_TREE_DECL (dval), exp,
4866 VAR_INIT_STATUS_INITIALIZED);
4868 emit_debug_insn_before (bind, insn);
4869 *exp_p = dval;
4870 return;
4873 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
4874 int i, j;
4875 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
4876 switch (*format_ptr++)
4878 case 'e':
4879 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
4880 break;
4882 case 'E':
4883 case 'V':
4884 for (j = 0; j < XVECLEN (exp, i); j++)
4885 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
4886 break;
4888 default:
4889 break;
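/* A sketch of the effect (operand names made up): in a location like
   (plus (mult (plus (mult (plus A B) C) D) E) F), the innermost
   (plus A B) sits at depth 4, so it is replaced by a fresh DEBUG_EXPR
   D#1 and a bind roughly like

     (debug_insn (var_location D#1 (plus A B)))

   is emitted before INSN; bare registers and constants are never
   split, per the OBJECT_P early return above.  */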
4893 /* Expand the _LOCs in debug insns. We run this after expanding all
4894 regular insns, so that any variables referenced in the function
4895 will have their DECL_RTLs set. */
4897 static void
4898 expand_debug_locations (void)
4900 rtx_insn *insn;
4901 rtx_insn *last = get_last_insn ();
4902 int save_strict_alias = flag_strict_aliasing;
4904 /* New alias sets while setting up memory attributes cause
4905 -fcompare-debug failures, even though they don't bring about any
4906 codegen changes. */
4907 flag_strict_aliasing = 0;
4909 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4910 if (DEBUG_INSN_P (insn))
4912 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
4913 rtx val;
4914 rtx_insn *prev_insn, *insn2;
4915 machine_mode mode;
4917 if (value == NULL_TREE)
4918 val = NULL_RTX;
4919 else
4921 if (INSN_VAR_LOCATION_STATUS (insn)
4922 == VAR_INIT_STATUS_UNINITIALIZED)
4923 val = expand_debug_source_expr (value);
4924 else
4925 val = expand_debug_expr (value);
4926 gcc_assert (last == get_last_insn ());
4929 if (!val)
4930 val = gen_rtx_UNKNOWN_VAR_LOC ();
4931 else
4933 mode = GET_MODE (INSN_VAR_LOCATION (insn));
4935 gcc_assert (mode == GET_MODE (val)
4936 || (GET_MODE (val) == VOIDmode
4937 && (CONST_SCALAR_INT_P (val)
4938 || GET_CODE (val) == CONST_FIXED
4939 || GET_CODE (val) == LABEL_REF)));
4942 INSN_VAR_LOCATION_LOC (insn) = val;
4943 prev_insn = PREV_INSN (insn);
4944 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
4945 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
4948 flag_strict_aliasing = save_strict_alias;
4951 /* Expand basic block BB from GIMPLE trees to RTL. */
4953 static basic_block
4954 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
4956 gimple_stmt_iterator gsi;
4957 gimple_seq stmts;
4958 gimple stmt = NULL;
4959 rtx_note *note;
4960 rtx_insn *last;
4961 edge e;
4962 edge_iterator ei;
4964 if (dump_file)
4965 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
4966 bb->index);
4968 /* Note that since we are now transitioning from GIMPLE to RTL, we
4969 cannot use the gsi_*_bb() routines because they expect the basic
4970 block to be in GIMPLE, instead of RTL. Therefore, we need to
4971 access the BB sequence directly. */
4972 stmts = bb_seq (bb);
4973 bb->il.gimple.seq = NULL;
4974 bb->il.gimple.phi_nodes = NULL;
4975 rtl_profile_for_bb (bb);
4976 init_rtl_bb_info (bb);
4977 bb->flags |= BB_RTL;
4979 /* Remove the RETURN_EXPR if we may fall through to the exit
4980 instead. */
4981 gsi = gsi_last (stmts);
4982 if (!gsi_end_p (gsi)
4983 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
4985 gimple ret_stmt = gsi_stmt (gsi);
4987 gcc_assert (single_succ_p (bb));
4988 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
4990 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
4991 && !gimple_return_retval (ret_stmt))
4993 gsi_remove (&gsi, false);
4994 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
4998 gsi = gsi_start (stmts);
4999 if (!gsi_end_p (gsi))
5001 stmt = gsi_stmt (gsi);
5002 if (gimple_code (stmt) != GIMPLE_LABEL)
5003 stmt = NULL;
5006 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5008 if (stmt || elt)
5010 last = get_last_insn ();
5012 if (stmt)
5014 expand_gimple_stmt (stmt);
5015 gsi_next (&gsi);
5018 if (elt)
5019 emit_label (*elt);
5021 /* Java emits line number notes at the top of labels.
5022 ??? Make this go away once line number notes are obsoleted. */
5023 BB_HEAD (bb) = NEXT_INSN (last);
5024 if (NOTE_P (BB_HEAD (bb)))
5025 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5026 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5028 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5030 else
5031 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5033 NOTE_BASIC_BLOCK (note) = bb;
5035 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5037 basic_block new_bb;
5039 stmt = gsi_stmt (gsi);
5041 /* If this statement is a non-debug one, and we generate debug
5042 insns, then this one might be the last real use of a TERed
5043 SSA_NAME, but where there are still some debug uses further
5044 down. Expanding the current SSA name in such further debug
5045 uses by their RHS might lead to wrong debug info, as coalescing
5046 might make the operands of such RHS be placed into the same
5047 pseudo as something else. Like so:
5048 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5049 use(a_1);
5050 a_2 = ...
5051 #DEBUG ... => a_1
5052 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5053 If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
5054 the write to a_2 would actually have clobbered the place which
5055 formerly held a_0.
5057 So, instead of that, we recognize the situation, and generate
5058 debug temporaries at the last real use of TERed SSA names:
5059 a_1 = a_0 + 1;
5060 #DEBUG #D1 => a_1
5061 use(a_1);
5062 a_2 = ...
5063 #DEBUG ... => #D1
5065 if (MAY_HAVE_DEBUG_INSNS
5066 && SA.values
5067 && !is_gimple_debug (stmt))
5069 ssa_op_iter iter;
5070 tree op;
5071 gimple def;
5073 location_t sloc = curr_insn_location ();
5075 /* Look for SSA names that have their last use here (TERed
5076 names always have only one real use). */
5077 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5078 if ((def = get_gimple_for_ssa_name (op)))
5080 imm_use_iterator imm_iter;
5081 use_operand_p use_p;
5082 bool have_debug_uses = false;
5084 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5086 if (gimple_debug_bind_p (USE_STMT (use_p)))
5088 have_debug_uses = true;
5089 break;
5093 if (have_debug_uses)
5095 /* OP is a TERed SSA name, with DEF its defining
5096 statement, and where OP is used in further debug
5097 instructions. Generate a debug temporary, and
5098 replace all uses of OP in debug insns with that
5099 temporary. */
5100 gimple debugstmt;
5101 tree value = gimple_assign_rhs_to_tree (def);
5102 tree vexpr = make_node (DEBUG_EXPR_DECL);
5103 rtx val;
5104 machine_mode mode;
5106 set_curr_insn_location (gimple_location (def));
5108 DECL_ARTIFICIAL (vexpr) = 1;
5109 TREE_TYPE (vexpr) = TREE_TYPE (value);
5110 if (DECL_P (value))
5111 mode = DECL_MODE (value);
5112 else
5113 mode = TYPE_MODE (TREE_TYPE (value));
5114 DECL_MODE (vexpr) = mode;
5116 val = gen_rtx_VAR_LOCATION
5117 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5119 emit_debug_insn (val);
5121 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5123 if (!gimple_debug_bind_p (debugstmt))
5124 continue;
5126 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5127 SET_USE (use_p, vexpr);
5129 update_stmt (debugstmt);
5133 set_curr_insn_location (sloc);
5136 currently_expanding_gimple_stmt = stmt;
5138 /* Expand this statement, then evaluate the resulting RTL and
5139 fixup the CFG accordingly. */
5140 if (gimple_code (stmt) == GIMPLE_COND)
5142 new_bb = expand_gimple_cond (bb, stmt);
5143 if (new_bb)
5144 return new_bb;
5146 else if (gimple_debug_bind_p (stmt))
5148 location_t sloc = curr_insn_location ();
5149 gimple_stmt_iterator nsi = gsi;
5151 for (;;)
5153 tree var = gimple_debug_bind_get_var (stmt);
5154 tree value;
5155 rtx val;
5156 machine_mode mode;
5158 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5159 && TREE_CODE (var) != LABEL_DECL
5160 && !target_for_debug_bind (var))
5161 goto delink_debug_stmt;
5163 if (gimple_debug_bind_has_value_p (stmt))
5164 value = gimple_debug_bind_get_value (stmt);
5165 else
5166 value = NULL_TREE;
5168 last = get_last_insn ();
5170 set_curr_insn_location (gimple_location (stmt));
5172 if (DECL_P (var))
5173 mode = DECL_MODE (var);
5174 else
5175 mode = TYPE_MODE (TREE_TYPE (var));
5177 val = gen_rtx_VAR_LOCATION
5178 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5180 emit_debug_insn (val);
5182 if (dump_file && (dump_flags & TDF_DETAILS))
5184 /* We can't dump the insn with a TREE where an RTX
5185 is expected. */
5186 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5187 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5188 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5191 delink_debug_stmt:
5192 /* In order not to generate too many debug temporaries,
5193 we delink all uses of debug statements we already expanded.
5194 Therefore debug statements between definition and real
5195 use of TERed SSA names will continue to use the SSA name,
5196 and not be replaced with debug temps. */
5197 delink_stmt_imm_use (stmt);
5199 gsi = nsi;
5200 gsi_next (&nsi);
5201 if (gsi_end_p (nsi))
5202 break;
5203 stmt = gsi_stmt (nsi);
5204 if (!gimple_debug_bind_p (stmt))
5205 break;
5208 set_curr_insn_location (sloc);
5210 else if (gimple_debug_source_bind_p (stmt))
5212 location_t sloc = curr_insn_location ();
5213 tree var = gimple_debug_source_bind_get_var (stmt);
5214 tree value = gimple_debug_source_bind_get_value (stmt);
5215 rtx val;
5216 machine_mode mode;
5218 last = get_last_insn ();
5220 set_curr_insn_location (gimple_location (stmt));
5222 mode = DECL_MODE (var);
5224 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5225 VAR_INIT_STATUS_UNINITIALIZED);
5227 emit_debug_insn (val);
5229 if (dump_file && (dump_flags & TDF_DETAILS))
5231 /* We can't dump the insn with a TREE where an RTX
5232 is expected. */
5233 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5234 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5235 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5238 set_curr_insn_location (sloc);
5240 else
5242 if (is_gimple_call (stmt)
5243 && gimple_call_tail_p (stmt)
5244 && disable_tail_calls)
5245 gimple_call_set_tail (stmt, false);
5247 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
5249 bool can_fallthru;
5250 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
5251 if (new_bb)
5253 if (can_fallthru)
5254 bb = new_bb;
5255 else
5256 return new_bb;
5259 else
5261 def_operand_p def_p;
5262 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5264 if (def_p != NULL)
5266 /* Ignore this stmt if it is in the list of
5267 replaceable expressions. */
5268 if (SA.values
5269 && bitmap_bit_p (SA.values,
5270 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5271 continue;
5273 last = expand_gimple_stmt (stmt);
5274 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5279 currently_expanding_gimple_stmt = NULL;
5281 /* Expand implicit goto and convert goto_locus. */
5282 FOR_EACH_EDGE (e, ei, bb->succs)
5284 if (e->goto_locus != UNKNOWN_LOCATION)
5285 set_curr_insn_location (e->goto_locus);
5286 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5288 emit_jump (label_rtx_for_bb (e->dest));
5289 e->flags &= ~EDGE_FALLTHRU;
5293 /* Expanded RTL can create a jump as the last instruction of the block.
5294 This might later be assumed to be a jump to the successor and break edge insertion.
5295 We need to insert a dummy move to prevent this. PR41440. */
5296 if (single_succ_p (bb)
5297 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5298 && (last = get_last_insn ())
5299 && JUMP_P (last))
5301 rtx dummy = gen_reg_rtx (SImode);
5302 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5305 do_pending_stack_adjust ();
5307 /* Find the block tail. The last insn in the block is the insn
5308 before a barrier and/or table jump insn. */
5309 last = get_last_insn ();
5310 if (BARRIER_P (last))
5311 last = PREV_INSN (last);
5312 if (JUMP_TABLE_DATA_P (last))
5313 last = PREV_INSN (PREV_INSN (last));
5314 BB_END (bb) = last;
5316 update_bb_for_insn (bb);
5318 return bb;
5322 /* Create a basic block for initialization code. */
5324 static basic_block
5325 construct_init_block (void)
5327 basic_block init_block, first_block;
5328 edge e = NULL;
5329 int flags;
5331 /* Multiple entry points not supported yet. */
5332 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5333 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5334 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5335 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5336 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5338 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5340 /* When the entry edge points to the first basic block, we don't need a jump;
5341 otherwise we have to jump to the proper target. */
5342 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5344 tree label = gimple_block_label (e->dest);
5346 emit_jump (label_rtx (label));
5347 flags = 0;
5349 else
5350 flags = EDGE_FALLTHRU;
5352 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5353 get_last_insn (),
5354 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5355 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5356 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5357 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5358 if (e)
5360 first_block = e->dest;
5361 redirect_edge_succ (e, init_block);
5362 e = make_edge (init_block, first_block, flags);
5364 else
5365 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5366 e->probability = REG_BR_PROB_BASE;
5367 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5369 update_bb_for_insn (init_block);
5370 return init_block;
5373 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5374 found in the block tree. */
5376 static void
5377 set_block_levels (tree block, int level)
5379 while (block)
5381 BLOCK_NUMBER (block) = level;
5382 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5383 block = BLOCK_CHAIN (block);
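/* Tiny example: for source scopes { outer { inner } }, a call
   set_block_levels (outer_block, 0) gives the outer BLOCK number 0 and
   the inner one 1, with sibling blocks at the same depth sharing a
   level via the BLOCK_CHAIN walk.  */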
5387 /* Create a block containing landing pads and similar stuff. */
5389 static void
5390 construct_exit_block (void)
5392 rtx_insn *head = get_last_insn ();
5393 rtx_insn *end;
5394 basic_block exit_block;
5395 edge e, e2;
5396 unsigned ix;
5397 edge_iterator ei;
5398 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5399 rtx_insn *orig_end = BB_END (prev_bb);
5401 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5403 /* Make sure the locus is set to the end of the function, so that
5404 epilogue line numbers and warnings are set properly. */
5405 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5406 input_location = cfun->function_end_locus;
5408 /* Generate rtl for function exit. */
5409 expand_function_end ();
5411 end = get_last_insn ();
5412 if (head == end)
5413 return;
5414 /* While emitting the function end we could have moved the end of the last
5415 basic block. */
5416 BB_END (prev_bb) = orig_end;
5417 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5418 head = NEXT_INSN (head);
5419 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5420 bb frequency counting will be confused. Any instructions before that
5421 label are emitted for the case where PREV_BB falls through into the
5422 exit block, so append those instructions to prev_bb in that case. */
5423 if (NEXT_INSN (head) != return_label)
5425 while (NEXT_INSN (head) != return_label)
5427 if (!NOTE_P (NEXT_INSN (head)))
5428 BB_END (prev_bb) = NEXT_INSN (head);
5429 head = NEXT_INSN (head);
5432 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5433 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5434 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5435 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5437 ix = 0;
5438 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5440 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5441 if (!(e->flags & EDGE_ABNORMAL))
5442 redirect_edge_succ (e, exit_block);
5443 else
5444 ix++;
5447 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5448 e->probability = REG_BR_PROB_BASE;
5449 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5450 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5451 if (e2 != e)
5453 e->count -= e2->count;
5454 exit_block->count -= e2->count;
5455 exit_block->frequency -= EDGE_FREQUENCY (e2);
5457 if (e->count < 0)
5458 e->count = 0;
5459 if (exit_block->count < 0)
5460 exit_block->count = 0;
5461 if (exit_block->frequency < 0)
5462 exit_block->frequency = 0;
5463 update_bb_for_insn (exit_block);
5466 /* Helper function for discover_nonconstant_array_refs.
5467 Look for ARRAY_REF nodes with non-constant indexes and mark them
5468 addressable. */
5470 static tree
5471 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5472 void *data ATTRIBUTE_UNUSED)
5474 tree t = *tp;
5476 if (IS_TYPE_OR_DECL_P (t))
5477 *walk_subtrees = 0;
5478 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5480 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5481 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5482 && (!TREE_OPERAND (t, 2)
5483 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5484 || (TREE_CODE (t) == COMPONENT_REF
5485 && (!TREE_OPERAND (t,2)
5486 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5487 || TREE_CODE (t) == BIT_FIELD_REF
5488 || TREE_CODE (t) == REALPART_EXPR
5489 || TREE_CODE (t) == IMAGPART_EXPR
5490 || TREE_CODE (t) == VIEW_CONVERT_EXPR
5491 || CONVERT_EXPR_P (t))
5492 t = TREE_OPERAND (t, 0);
5494 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5496 t = get_base_address (t);
5497 if (t && DECL_P (t)
5498 && DECL_MODE (t) != BLKmode)
5499 TREE_ADDRESSABLE (t) = 1;
5502 *walk_subtrees = 0;
5505 return NULL_TREE;
5508 /* RTL expansion is not able to compile array references with variable
5509 offsets for arrays stored in a single register. Discover such
5510 expressions and mark variables as addressable to avoid this
5511 scenario. */
5513 static void
5514 discover_nonconstant_array_refs (void)
5516 basic_block bb;
5517 gimple_stmt_iterator gsi;
5519 FOR_EACH_BB_FN (bb, cfun)
5520 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5522 gimple stmt = gsi_stmt (gsi);
5523 if (!is_gimple_debug (stmt))
5524 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
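/* Illustrative C input (hypothetical):

     short v[2];
     v[0] = a; v[1] = b;
     return v[i];   // non-constant index

   V is small enough that it could otherwise live in a single register,
   but the variable index makes the walker above set TREE_ADDRESSABLE,
   forcing V into a stack slot that RTL expansion can index.  */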
5528 /* This function sets crtl->args.internal_arg_pointer to a virtual
5529 register if DRAP is needed. Local register allocator will replace
5530 virtual_incoming_args_rtx with the virtual register. */
5532 static void
5533 expand_stack_alignment (void)
5535 rtx drap_rtx;
5536 unsigned int preferred_stack_boundary;
5538 if (! SUPPORTS_STACK_ALIGNMENT)
5539 return;
5541 if (cfun->calls_alloca
5542 || cfun->has_nonlocal_label
5543 || crtl->has_nonlocal_goto)
5544 crtl->need_drap = true;
5546 /* Call update_stack_boundary here again to update incoming stack
5547 boundary. It may set incoming stack alignment to a different
5548 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5549 use the minimum incoming stack alignment to check if it is OK
5550 to perform sibcall optimization since sibcall optimization will
5551 only align the outgoing stack to incoming stack boundary. */
5552 if (targetm.calls.update_stack_boundary)
5553 targetm.calls.update_stack_boundary ();
5555 /* The incoming stack frame has to be aligned at least at
5556 parm_stack_boundary. */
5557 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5559 /* Update crtl->stack_alignment_estimated and use it later to align
5560 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5561 exceptions since callgraph doesn't collect incoming stack alignment
5562 in this case. */
5563 if (cfun->can_throw_non_call_exceptions
5564 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5565 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5566 else
5567 preferred_stack_boundary = crtl->preferred_stack_boundary;
5568 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5569 crtl->stack_alignment_estimated = preferred_stack_boundary;
5570 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5571 crtl->stack_alignment_needed = preferred_stack_boundary;
5573 gcc_assert (crtl->stack_alignment_needed
5574 <= crtl->stack_alignment_estimated);
5576 crtl->stack_realign_needed
5577 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5578 crtl->stack_realign_tried = crtl->stack_realign_needed;
5580 crtl->stack_realign_processed = true;
5582 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5583 alignment. */
5584 gcc_assert (targetm.calls.get_drap_rtx != NULL);
5585 drap_rtx = targetm.calls.get_drap_rtx ();
5587 /* stack_realign_drap and drap_rtx must match. */
5588 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5590 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5591 if (NULL != drap_rtx)
5593 crtl->args.internal_arg_pointer = drap_rtx;
5595 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5596 needed. */
5597 fixup_tail_calls ();
5602 static void
5603 expand_main_function (void)
5605 #if (defined(INVOKE__main) \
5606 || (!defined(HAS_INIT_SECTION) \
5607 && !defined(INIT_SECTION_ASM_OP) \
5608 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5609 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5610 #endif
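/* Effect sketch: on targets without an init section this is equivalent
   to compiling main as if it began with

     extern void __main (void);
     __main ();

   so that static constructors run before any user code.  */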
5614 /* Expand code to initialize the stack_protect_guard. This is invoked at
5615 the beginning of a function to be protected. */
5617 #ifndef HAVE_stack_protect_set
5618 # define HAVE_stack_protect_set 0
5619 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5620 #endif
5622 static void
5623 stack_protect_prologue (void)
5625 tree guard_decl = targetm.stack_protect_guard ();
5626 rtx x, y;
5628 x = expand_normal (crtl->stack_protect_guard);
5629 y = expand_normal (guard_decl);
5631 /* Allow the target to copy from Y to X without leaking Y into a
5632 register. */
5633 if (HAVE_stack_protect_set)
5635 rtx insn = gen_stack_protect_set (x, y);
5636 if (insn)
5638 emit_insn (insn);
5639 return;
5643 /* Otherwise do a straight move. */
5644 emit_move_insn (x, y);
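/* Conceptual equivalent (a sketch; the guard symbol name depends on the
   target, __stack_chk_guard being a common choice): the prologue copies
   the global guard into the frame-local copy, roughly

     frame_guard = __stack_chk_guard;

   preferring the stack_protect_set pattern so the guard value does not
   linger in a scratch register an attacker could read.  */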
5647 /* Translate the intermediate representation contained in the CFG
5648 from GIMPLE trees to RTL.
5650 We do conversion per basic block and preserve/update the tree CFG.
5651 This implies we have to do some magic as the CFG can simultaneously
5652 consist of basic blocks containing RTL and GIMPLE trees. This can
5653 confuse the CFG hooks, so be careful to not manipulate CFG during
5654 the expansion. */
5656 namespace {
5658 const pass_data pass_data_expand =
5660 RTL_PASS, /* type */
5661 "expand", /* name */
5662 OPTGROUP_NONE, /* optinfo_flags */
5663 TV_EXPAND, /* tv_id */
5664 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5665 | PROP_gimple_lcx
5666 | PROP_gimple_lvec ), /* properties_required */
5667 PROP_rtl, /* properties_provided */
5668 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5669 0, /* todo_flags_start */
5670 0, /* todo_flags_finish */
5673 class pass_expand : public rtl_opt_pass
5675 public:
5676 pass_expand (gcc::context *ctxt)
5677 : rtl_opt_pass (pass_data_expand, ctxt)
5680 /* opt_pass methods: */
5681 virtual unsigned int execute (function *);
5683 }; // class pass_expand
5685 unsigned int
5686 pass_expand::execute (function *fun)
5688 basic_block bb, init_block;
5689 sbitmap blocks;
5690 edge_iterator ei;
5691 edge e;
5692 rtx_insn *var_seq, *var_ret_seq;
5693 unsigned i;
5695 timevar_push (TV_OUT_OF_SSA);
5696 rewrite_out_of_ssa (&SA);
5697 timevar_pop (TV_OUT_OF_SSA);
5698 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
5700 /* Make sure all values used by the optimization passes have sane
5701 defaults. */
5702 reg_renumber = 0;
5704 /* Some backends want to know that we are expanding to RTL. */
5705 currently_expanding_to_rtl = 1;
5706 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5707 free_dominance_info (CDI_DOMINATORS);
5709 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
5711 if (chkp_function_instrumented_p (current_function_decl))
5712 chkp_reset_rtl_bounds ();
5714 insn_locations_init ();
5715 if (!DECL_IS_BUILTIN (current_function_decl))
5717 /* Eventually, all FEs should explicitly set function_start_locus. */
5718 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
5719 set_curr_insn_location
5720 (DECL_SOURCE_LOCATION (current_function_decl));
5721 else
5722 set_curr_insn_location (fun->function_start_locus);
5724 else
5725 set_curr_insn_location (UNKNOWN_LOCATION);
5726 prologue_location = curr_insn_location ();
5728 #ifdef INSN_SCHEDULING
5729 init_sched_attrs ();
5730 #endif
5732 /* Make sure first insn is a note even if we don't want linenums.
5733 This makes sure the first insn will never be deleted.
5734 Also, final expects a note to appear there. */
5735 emit_note (NOTE_INSN_DELETED);
5737 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5738 discover_nonconstant_array_refs ();
5740 targetm.expand_to_rtl_hook ();
5741 crtl->stack_alignment_needed = STACK_BOUNDARY;
5742 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
5743 crtl->stack_alignment_estimated = 0;
5744 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5745 fun->cfg->max_jumptable_ents = 0;
5747 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
5748 of the function section at expansion time to predict the distance of calls. */
5749 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5751 /* Expand the variables recorded during gimple lowering. */
5752 timevar_push (TV_VAR_EXPAND);
5753 start_sequence ();
5755 var_ret_seq = expand_used_vars ();
5757 var_seq = get_insns ();
5758 end_sequence ();
5759 timevar_pop (TV_VAR_EXPAND);
5761 /* Honor stack protection warnings. */
5762 if (warn_stack_protect)
5764 if (fun->calls_alloca)
5765 warning (OPT_Wstack_protector,
5766 "stack protector not protecting local variables: "
5767 "variable length buffer");
5768 if (has_short_buffer && !crtl->stack_protect_guard)
5769 warning (OPT_Wstack_protector,
5770 "stack protector not protecting function: "
5771 "all local arrays are less than %d bytes long",
5772 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
5775 /* Set up parameters and prepare for return, for the function. */
5776 expand_function_start (current_function_decl);
5778 /* If we emitted any instructions for setting up the variables,
5779 emit them before the FUNCTION_START note. */
5780 if (var_seq)
5782 emit_insn_before (var_seq, parm_birth_insn);
5784 /* In expand_function_end we'll insert the alloca save/restore
5785 before parm_birth_insn. We've just inserted an alloca call.
5786 Adjust the pointer to match. */
5787 parm_birth_insn = var_seq;
5790 /* Now that we also have the parameter RTXs, copy them over to our
5791 partitions. */
5792 for (i = 0; i < SA.map->num_partitions; i++)
5794 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
5796 if (TREE_CODE (var) != VAR_DECL
5797 && !SA.partition_to_pseudo[i])
5798 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
5799 gcc_assert (SA.partition_to_pseudo[i]);
5801 /* If this decl was marked as living in multiple places, reset
5802 this now to NULL. */
5803 if (DECL_RTL_IF_SET (var) == pc_rtx)
5804 SET_DECL_RTL (var, NULL);
5806 /* Some RTL parts really want to look at DECL_RTL(x) when x
5807 was a decl marked in REG_ATTR or MEM_ATTR. We could use
5808 SET_DECL_RTL here making this available, but that would mean
5809 to select one of the potentially many RTLs for one DECL. Instead
5810 of doing that we simply reset the MEM_EXPR of the RTL in question,
5811 then nobody can get at it and hence nobody can call DECL_RTL on it. */
5812 if (!DECL_RTL_SET_P (var))
5814 if (MEM_P (SA.partition_to_pseudo[i]))
5815 set_mem_expr (SA.partition_to_pseudo[i], NULL);
5819   /* If an SSA-name partition contains differently aligned pointers,
5820      we need to merge those alignments into the corresponding RTL pointer
5821      alignment.  */
5822   for (i = 1; i < num_ssa_names; i++)
5823     {
5824       tree name = ssa_name (i);
5825       int part;
5826       rtx r;
5828       if (!name
5829           /* We might have generated new SSA names in
5830              update_alias_info_with_stack_vars.  They will have NULL
5831              defining statements and won't be part of the partitioning,
5832              so ignore those.  */
5833           || !SSA_NAME_DEF_STMT (name))
5834         continue;
5835       part = var_to_partition (SA.map, name);
5836       if (part == NO_PARTITION)
5837         continue;
5839       /* Adjust all partition members to get the underlying decl of
5840          the representative which we might have created in expand_one_var.  */
5841       if (SSA_NAME_VAR (name) == NULL_TREE)
5842         {
5843           tree leader = partition_to_var (SA.map, part);
5844           gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
5845           replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
5846         }
5847       if (!POINTER_TYPE_P (TREE_TYPE (name)))
5848         continue;
5850       r = SA.partition_to_pseudo[part];
5851       if (REG_P (r))
5852         mark_reg_pointer (r, get_pointer_alignment (name));
5853     }
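  /* mark_reg_pointer sets REG_POINTER on the pseudo and records the
     alignment, which later RTL passes (alias analysis, address generation)
     may rely on.  When the names of one partition carry different
     alignments, the repeated calls keep the smallest value, the only
     merged alignment that is safe for all of them.  */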
5855 /* If this function is `main', emit a call to `__main'
5856 to run global initializers, etc. */
5857 if (DECL_NAME (current_function_decl)
5858 && MAIN_NAME_P (DECL_NAME (current_function_decl))
5859 && DECL_FILE_SCOPE_P (current_function_decl))
5860 expand_main_function ();
5862 /* Initialize the stack_protect_guard field. This must happen after the
5863 call to __main (if any) so that the external decl is initialized. */
5864 if (crtl->stack_protect_guard)
5865 stack_protect_prologue ();
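  /* Sketch of what the protector expands to on a typical glibc target
     (details are target-specific and illustrative only): the prologue
     copies the guard value, e.g. from __stack_chk_guard or the x86-64 TLS
     slot %fs:0x28, into a slot placed above the local arrays; the matching
     epilogue code emitted by expand_function_end re-reads that slot and
     calls __stack_chk_fail on a mismatch.  */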
5867 expand_phi_nodes (&SA);
5869 /* Register rtl specific functions for cfg. */
5870 rtl_register_cfg_hooks ();
5872 init_block = construct_init_block ();
5874   /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleared from the
5875      remaining edges later.  */
5876 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
5877 e->flags &= ~EDGE_EXECUTABLE;
5879 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
5880 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
5881 next_bb)
5882 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
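  /* expand_gimple_basic_block may split the block it expands, e.g. for a
     GIMPLE_COND ending the block or for a tail call leaving the function,
     and it returns the last RTL block it created; assigning that back to
     BB keeps the walk from revisiting freshly created blocks.  */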
5884 if (MAY_HAVE_DEBUG_INSNS)
5885 expand_debug_locations ();
5887 /* Free stuff we no longer need after GIMPLE optimizations. */
5888 free_dominance_info (CDI_DOMINATORS);
5889 free_dominance_info (CDI_POST_DOMINATORS);
5890 delete_tree_cfg_annotations ();
5892 timevar_push (TV_OUT_OF_SSA);
5893 finish_out_of_ssa (&SA);
5894 timevar_pop (TV_OUT_OF_SSA);
5896 timevar_push (TV_POST_EXPAND);
5897 /* We are no longer in SSA form. */
5898 fun->gimple_df->in_ssa_p = false;
5899 loops_state_clear (LOOP_CLOSED_SSA);
5901   /* Expansion is used by optimization passes too; set maybe_hot_insn_p
5902      conservatively to true until they are all profile-aware.  */
5903 delete lab_rtx_for_bb;
5904 free_histograms ();
5906 construct_exit_block ();
5907 insn_locations_finalize ();
5909   if (var_ret_seq)
5910     {
5911       rtx_insn *after = return_label;
5912       rtx_insn *next = NEXT_INSN (after);
5913       if (next && NOTE_INSN_BASIC_BLOCK_P (next))
5914         after = next;
5915       emit_insn_after (var_ret_seq, after);
5916     }
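  /* A nonempty var_ret_seq comes from expand_used_vars: stack variables
     that were given a sanitized (ASan) frame need unpoisoning code at the
     return point too.  It goes right after return_label, but behind the
     basic-block note when one directly follows the label.  */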
5918 /* Zap the tree EH table. */
5919 set_eh_throw_stmt_table (fun, NULL);
5921   /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
5922      to split edges, which edge insertions might require.  */
5923 rebuild_jump_labels (get_insns ());
5925   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
5926                   EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
5927     {
5928       edge e;
5929       edge_iterator ei;
5930       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5931         {
5932           if (e->insns.r)
5933             {
5934               rebuild_jump_labels_chain (e->insns.r);
5935               /* Put insns after parm birth, but before
5936                  NOTE_INSN_FUNCTION_BEG.  */
5937               if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
5938                   && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
5939                 {
5940                   rtx_insn *insns = e->insns.r;
5941                   e->insns.r = NULL;
5942                   if (NOTE_P (parm_birth_insn)
5943                       && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
5944                     emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
5945                   else
5946                     emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
5947                 }
5948               else
5949                 commit_one_edge_insertion (e);
5950             }
5951           else
5952             ei_next (&ei);
5953         }
5954     }
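  /* Any insns still pending on an edge go through commit_one_edge_insertion,
     which finds or makes a block to hold them: the destination block when
     the edge is its only entry, otherwise a new block obtained by splitting
     the edge.  */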
5956 /* We're done expanding trees to RTL. */
5957 currently_expanding_to_rtl = 0;
5959   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
5960                   EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
5961     {
5962       edge e;
5963       edge_iterator ei;
5964       for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5965         {
5966           /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
5967           e->flags &= ~EDGE_EXECUTABLE;
5969           /* At the moment not all abnormal edges match the RTL
5970              representation.  It is safe to remove them here as
5971              find_many_sub_basic_blocks will rediscover them.
5972              In the future we should get this fixed properly.  */
5973           if ((e->flags & EDGE_ABNORMAL)
5974               && !(e->flags & EDGE_SIBCALL))
5975             remove_edge (e);
5976           else
5977             ei_next (&ei);
5978         }
5979     }
5981 blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
5982 bitmap_ones (blocks);
5983 find_many_sub_basic_blocks (blocks);
5984 sbitmap_free (blocks);
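  /* Expansion can create control flow inside what GIMPLE treated as one
     block, e.g. by emitting calls that can throw.  Setting every bit and
     letting find_many_sub_basic_blocks scan all blocks re-establishes
     correct block boundaries and rediscovers the abnormal edges removed
     above.  */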
5985 purge_all_dead_edges ();
5987 expand_stack_alignment ();
5989 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
5990 function. */
5991 if (crtl->tail_call_emit)
5992 fixup_tail_calls ();
5994 /* After initial rtl generation, call back to finish generating
5995 exception support code. We need to do this before cleaning up
5996 the CFG as the code does not expect dead landing pads. */
5997 if (fun->eh->region_tree != NULL)
5998 finish_eh_generation ();
6000 /* Remove unreachable blocks, otherwise we cannot compute dominators
6001 which are needed for loop state verification. As a side-effect
6002 this also compacts blocks.
6003 ??? We cannot remove trivially dead insns here as for example
6004 the DRAP reg on i?86 is not magically live at this point.
6005 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6006 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6008 #ifdef ENABLE_CHECKING
6009 verify_flow_info ();
6010 #endif
6012 /* Initialize pseudos allocated for hard registers. */
6013 emit_initial_value_sets ();
6015 /* And finally unshare all RTL. */
6016 unshare_all_rtl ();
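  /* From here on the RTL must obey the no-shared-structure rule: apart
     from rtxes that are inherently shareable (constants, registers and the
     like), no rtx object may be reachable from two places.  Expansion is
     allowed to be sloppy about that for convenience, and unshare_all_rtl
     restores the invariant with one deep-copy pass over the function.  */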
6018 /* There's no need to defer outputting this function any more; we
6019 know we want to output it. */
6020 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6022 /* Now that we're done expanding trees to RTL, we shouldn't have any
6023 more CONCATs anywhere. */
6024 generating_concat_p = 0;
6026   if (dump_file)
6027     {
6028       fprintf (dump_file,
6029                "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6030       /* And the pass manager will dump RTL for us.  */
6031     }
6033   /* If we're emitting a nested function, make sure its parent gets
6034      emitted as well.  Doing otherwise confuses debug info.  */
6035   {
6036     tree parent;
6037     for (parent = DECL_CONTEXT (current_function_decl);
6038          parent != NULL_TREE;
6039          parent = get_containing_scope (parent))
6040       if (TREE_CODE (parent) == FUNCTION_DECL)
6041         TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6042   }
6044   /* We are now committed to emitting code for this function.  Do any
6045      preparation, such as emitting abstract debug info for the inline
6046      function, before it gets mangled by optimization.  */
6047 if (cgraph_function_possibly_inlined_p (current_function_decl))
6048 (*debug_hooks->outlining_inline_function) (current_function_decl);
6050 TREE_ASM_WRITTEN (current_function_decl) = 1;
6052 /* After expanding, the return labels are no longer needed. */
6053 return_label = NULL;
6054 naked_return_label = NULL;
6056 /* After expanding, the tm_restart map is no longer needed. */
6057   if (fun->gimple_df->tm_restart)
6058     {
6059       htab_delete (fun->gimple_df->tm_restart);
6060       fun->gimple_df->tm_restart = NULL;
6061     }
6063 /* Tag the blocks with a depth number so that change_scope can find
6064 the common parent easily. */
6065 set_block_levels (DECL_INITIAL (fun->decl), 0);
6066 default_rtl_profile ();
6068 timevar_pop (TV_POST_EXPAND);
6070   return 0;
6071 }
6073 } // anon namespace
6075 rtl_opt_pass *
6076 make_pass_expand (gcc::context *ctxt)
6077 {
6078   return new pass_expand (ctxt);
6079 }
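/* The pass manager instantiates pass_expand through this factory;
   NEXT_PASS (pass_expand) in passes.def marks the border between the
   GIMPLE and RTL pipelines.  A plugin can position its own pass relative
   to the expander in the usual way; a sketch, assuming a hypothetical
   make_pass_my_rtl factory:

       struct register_pass_info info
         = { make_pass_my_rtl (g), "expand", 1, PASS_POS_INSERT_AFTER };
       register_callback (plugin_name, PLUGIN_PASS_MANAGER_SETUP,
                          NULL, &info);

   where "expand" is this pass's dump name and g is the global
   gcc::context.  */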