Remove VEC_LSHIFT_EXPR and vec_shl_optab
gcc/cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "hard-reg-set.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "varasm.h"
29 #include "stor-layout.h"
30 #include "stmt.h"
31 #include "print-tree.h"
32 #include "tm_p.h"
33 #include "predict.h"
34 #include "vec.h"
35 #include "hashtab.h"
36 #include "hash-set.h"
37 #include "machmode.h"
38 #include "input.h"
39 #include "function.h"
40 #include "dominance.h"
41 #include "cfg.h"
42 #include "cfgrtl.h"
43 #include "cfganal.h"
44 #include "cfgbuild.h"
45 #include "cfgcleanup.h"
46 #include "basic-block.h"
47 #include "expr.h"
48 #include "langhooks.h"
49 #include "bitmap.h"
50 #include "tree-ssa-alias.h"
51 #include "internal-fn.h"
52 #include "tree-eh.h"
53 #include "gimple-expr.h"
54 #include "is-a.h"
55 #include "gimple.h"
56 #include "gimple-iterator.h"
57 #include "gimple-walk.h"
58 #include "gimple-ssa.h"
59 #include "cgraph.h"
60 #include "tree-cfg.h"
61 #include "tree-phinodes.h"
62 #include "ssa-iterators.h"
63 #include "tree-ssanames.h"
64 #include "tree-dfa.h"
65 #include "tree-ssa.h"
66 #include "tree-pass.h"
67 #include "except.h"
68 #include "flags.h"
69 #include "diagnostic.h"
70 #include "gimple-pretty-print.h"
71 #include "toplev.h"
72 #include "debug.h"
73 #include "params.h"
74 #include "tree-inline.h"
75 #include "value-prof.h"
76 #include "target.h"
77 #include "tree-ssa-live.h"
78 #include "tree-outof-ssa.h"
79 #include "sbitmap.h"
80 #include "cfgloop.h"
81 #include "regs.h" /* For reg_renumber. */
82 #include "insn-attr.h" /* For INSN_SCHEDULING. */
83 #include "asan.h"
84 #include "tree-ssa-address.h"
85 #include "recog.h"
86 #include "output.h"
87 #include "builtins.h"
89 /* Some systems use __main in a way incompatible with its use in gcc, in these
90 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
91 give the same symbol without quotes for an alternative entry point. You
92 must define both, or neither. */
93 #ifndef NAME__MAIN
94 #define NAME__MAIN "__main"
95 #endif
97 /* This variable holds information helping the rewriting of SSA trees
98 into RTL. */
99 struct ssaexpand SA;
101 /* This variable holds the currently expanded gimple statement for purposes
102    of communicating the profile info to the builtin expanders.  */
103 gimple currently_expanding_gimple_stmt;
105 static rtx expand_debug_expr (tree);
107 /* Return an expression tree corresponding to the RHS of GIMPLE
108 statement STMT. */
110 tree
111 gimple_assign_rhs_to_tree (gimple stmt)
113 tree t;
114 enum gimple_rhs_class grhs_class;
116 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
118 if (grhs_class == GIMPLE_TERNARY_RHS)
119 t = build3 (gimple_assign_rhs_code (stmt),
120 TREE_TYPE (gimple_assign_lhs (stmt)),
121 gimple_assign_rhs1 (stmt),
122 gimple_assign_rhs2 (stmt),
123 gimple_assign_rhs3 (stmt));
124 else if (grhs_class == GIMPLE_BINARY_RHS)
125 t = build2 (gimple_assign_rhs_code (stmt),
126 TREE_TYPE (gimple_assign_lhs (stmt)),
127 gimple_assign_rhs1 (stmt),
128 gimple_assign_rhs2 (stmt));
129 else if (grhs_class == GIMPLE_UNARY_RHS)
130 t = build1 (gimple_assign_rhs_code (stmt),
131 TREE_TYPE (gimple_assign_lhs (stmt)),
132 gimple_assign_rhs1 (stmt));
133 else if (grhs_class == GIMPLE_SINGLE_RHS)
135 t = gimple_assign_rhs1 (stmt);
136 /* Avoid modifying this tree in place below. */
137 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
138 && gimple_location (stmt) != EXPR_LOCATION (t))
139 || (gimple_block (stmt)
140 && currently_expanding_to_rtl
141 && EXPR_P (t)))
142 t = copy_node (t);
144 else
145 gcc_unreachable ();
147 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
148 SET_EXPR_LOCATION (t, gimple_location (stmt));
150 return t;
154 #ifndef STACK_ALIGNMENT_NEEDED
155 #define STACK_ALIGNMENT_NEEDED 1
156 #endif
158 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
160 /* Associate declaration T with storage space X.  If T is not an
161 SSA name this is exactly SET_DECL_RTL, otherwise make the
162 partition of T associated with X. */
163 static inline void
164 set_rtl (tree t, rtx x)
166 if (TREE_CODE (t) == SSA_NAME)
168 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
169 if (x && !MEM_P (x))
170 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
171 /* For the benefit of debug information at -O0 (where vartracking
172 doesn't run) record the place also in the base DECL if it's
173 a normal variable (not a parameter). */
174 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
176 tree var = SSA_NAME_VAR (t);
177 /* If we don't yet have something recorded, just record it now. */
178 if (!DECL_RTL_SET_P (var))
179 SET_DECL_RTL (var, x);
180 /* If we have it set already to "multiple places" don't
181 change this. */
182 else if (DECL_RTL (var) == pc_rtx)
184 /* If we have something recorded and it's not the same place
185 as we want to record now, we have multiple partitions for the
186 same base variable, with different places. We can't just
187    randomly choose one, hence we have to say that we don't know.
188 This only happens with optimization, and there var-tracking
189 will figure out the right thing. */
190 else if (DECL_RTL (var) != x)
191 SET_DECL_RTL (var, pc_rtx);
194 else
195 SET_DECL_RTL (t, x);
198 /* This structure holds data relevant to one variable that will be
199 placed in a stack slot. */
200 struct stack_var
202 /* The Variable. */
203 tree decl;
205 /* Initially, the size of the variable. Later, the size of the partition,
206    if this variable becomes its partition's representative.  */
207 HOST_WIDE_INT size;
209 /* The *byte* alignment required for this variable. Or as, with the
210 size, the alignment for this partition. */
211 unsigned int alignb;
213 /* The partition representative. */
214 size_t representative;
216 /* The next stack variable in the partition, or EOC. */
217 size_t next;
219 /* The numbers of conflicting stack variables. */
220 bitmap conflicts;
223 #define EOC ((size_t)-1)
225 /* We have an array of such objects while deciding allocation. */
226 static struct stack_var *stack_vars;
227 static size_t stack_vars_alloc;
228 static size_t stack_vars_num;
229 static hash_map<tree, size_t> *decl_to_stack_part;
231 /* Conflict bitmaps go on this obstack. This allows us to destroy
232 all of them in one big sweep. */
233 static bitmap_obstack stack_var_bitmap_obstack;
235 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
236 is non-decreasing. */
237 static size_t *stack_vars_sorted;
239 /* The phase of the stack frame. This is the known misalignment of
240 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
241 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
242 static int frame_phase;
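/* Illustrative sketch (added for exposition, not part of cfgexpand.c):
   how frame_phase relates STARTING_FRAME_OFFSET to the preferred stack
   boundary, using made-up example values (128-bit boundary, 8-byte
   starting offset).  All names here are invented for the example; the
   code compiles standalone if extracted.  */
#if 0
#include <assert.h>

/* Hypothetical target parameters, for the example only.  */
#define EXAMPLE_STARTING_FRAME_OFFSET 8       /* bytes */
#define EXAMPLE_PREFERRED_STACK_BOUNDARY 128  /* bits */

static int
example_frame_phase (void)
{
  int align = EXAMPLE_PREFERRED_STACK_BOUNDARY / 8;   /* 16 bytes */
  int off = EXAMPLE_STARTING_FRAME_OFFSET % align;    /* 8 */
  int phase = off ? align - off : 0;                  /* 8 */

  /* The invariant documented above: the starting frame offset plus the
     phase is a multiple of the (byte) stack boundary.  */
  assert ((EXAMPLE_STARTING_FRAME_OFFSET + phase) % align == 0);
  return phase;
}
#endif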
244 /* Used during expand_used_vars to remember if we saw any decls for
245 which we'd like to enable stack smashing protection. */
246 static bool has_protected_decls;
248 /* Used during expand_used_vars.  Remember if we saw a character buffer
249 smaller than our cutoff threshold. Used for -Wstack-protector. */
250 static bool has_short_buffer;
252 /* Compute the byte alignment to use for DECL.  Ignore the alignment
253    we can't satisfy with the expected alignment of the stack boundary.  */
255 static unsigned int
256 align_local_variable (tree decl)
258 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
259 DECL_ALIGN (decl) = align;
260 return align / BITS_PER_UNIT;
263 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
264 Return the frame offset. */
266 static HOST_WIDE_INT
267 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
269 HOST_WIDE_INT offset, new_frame_offset;
271 new_frame_offset = frame_offset;
272 if (FRAME_GROWS_DOWNWARD)
274 new_frame_offset -= size + frame_phase;
275 new_frame_offset &= -align;
276 new_frame_offset += frame_phase;
277 offset = new_frame_offset;
279 else
281 new_frame_offset -= frame_phase;
282 new_frame_offset += align - 1;
283 new_frame_offset &= -align;
284 new_frame_offset += frame_phase;
285 offset = new_frame_offset;
286 new_frame_offset += size;
288 frame_offset = new_frame_offset;
290 if (frame_offset_overflow (frame_offset, cfun->decl))
291 frame_offset = offset = 0;
293 return offset;
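/* Illustrative sketch (added for exposition, not part of cfgexpand.c):
   the rounding arithmetic used by alloc_stack_frame_space above.
   "& -align" clears the low bits of a two's-complement value, i.e.
   rounds towards minus infinity for a power-of-two ALIGN; a frame that
   grows downward therefore subtracts the size first and rounds down,
   while an upward-growing frame adds align-1 first so the "&" rounds
   up.  Invented names; compiles standalone if extracted.  */
#if 0
#include <assert.h>

typedef long long example_hwi;

static example_hwi
example_align_down (example_hwi x, example_hwi align)
{
  return x & -align;
}

static example_hwi
example_align_up (example_hwi x, example_hwi align)
{
  return (x + align - 1) & -align;
}

static void
example_rounding (void)
{
  /* Allocating 12 bytes at 16-byte alignment.  */
  assert (example_align_down (0 - 12, 16) == -16);  /* downward-growing frame */
  assert (example_align_up (12, 16) == 16);         /* upward-growing frame */
}
#endif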
296 /* Accumulate DECL into STACK_VARS. */
298 static void
299 add_stack_var (tree decl)
301 struct stack_var *v;
303 if (stack_vars_num >= stack_vars_alloc)
305 if (stack_vars_alloc)
306 stack_vars_alloc = stack_vars_alloc * 3 / 2;
307 else
308 stack_vars_alloc = 32;
309 stack_vars
310 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
312 if (!decl_to_stack_part)
313 decl_to_stack_part = new hash_map<tree, size_t>;
315 v = &stack_vars[stack_vars_num];
316 decl_to_stack_part->put (decl, stack_vars_num);
318 v->decl = decl;
319 v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
320 /* Ensure that all variables have size, so that &a != &b for any two
321 variables that are simultaneously live. */
322 if (v->size == 0)
323 v->size = 1;
324 v->alignb = align_local_variable (SSAVAR (decl));
325 /* An alignment of zero can mightily confuse us later. */
326 gcc_assert (v->alignb != 0);
328 /* All variables are initially in their own partition. */
329 v->representative = stack_vars_num;
330 v->next = EOC;
332 /* All variables initially conflict with no other. */
333 v->conflicts = NULL;
335 /* Ensure that this decl doesn't get put onto the list twice. */
336 set_rtl (decl, pc_rtx);
338 stack_vars_num++;
341 /* Make the decls associated with luid's X and Y conflict. */
343 static void
344 add_stack_var_conflict (size_t x, size_t y)
346 struct stack_var *a = &stack_vars[x];
347 struct stack_var *b = &stack_vars[y];
348 if (!a->conflicts)
349 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
350 if (!b->conflicts)
351 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
352 bitmap_set_bit (a->conflicts, y);
353 bitmap_set_bit (b->conflicts, x);
356 /* Check whether the decls associated with luid's X and Y conflict. */
358 static bool
359 stack_var_conflict_p (size_t x, size_t y)
361 struct stack_var *a = &stack_vars[x];
362 struct stack_var *b = &stack_vars[y];
363 if (x == y)
364 return false;
365 /* Partitions containing an SSA name result from gimple registers
366 with things like unsupported modes. They are top-level and
367 hence conflict with everything else. */
368 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
369 return true;
371 if (!a->conflicts || !b->conflicts)
372 return false;
373 return bitmap_bit_p (a->conflicts, y);
376 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
377 enter its partition number into bitmap DATA. */
379 static bool
380 visit_op (gimple, tree op, tree, void *data)
382 bitmap active = (bitmap)data;
383 op = get_base_address (op);
384 if (op
385 && DECL_P (op)
386 && DECL_RTL_IF_SET (op) == pc_rtx)
388 size_t *v = decl_to_stack_part->get (op);
389 if (v)
390 bitmap_set_bit (active, *v);
392 return false;
395 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
396 record conflicts between it and all currently active other partitions
397 from bitmap DATA. */
399 static bool
400 visit_conflict (gimple, tree op, tree, void *data)
402 bitmap active = (bitmap)data;
403 op = get_base_address (op);
404 if (op
405 && DECL_P (op)
406 && DECL_RTL_IF_SET (op) == pc_rtx)
408 size_t *v = decl_to_stack_part->get (op);
409 if (v && bitmap_set_bit (active, *v))
411 size_t num = *v;
412 bitmap_iterator bi;
413 unsigned i;
414 gcc_assert (num < stack_vars_num);
415 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
416 add_stack_var_conflict (num, i);
419 return false;
422 /* Helper routine for add_scope_conflicts, calculating the active partitions
423 at the end of BB, leaving the result in WORK. We're called to generate
424 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
425 liveness. */
427 static void
428 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
430 edge e;
431 edge_iterator ei;
432 gimple_stmt_iterator gsi;
433 walk_stmt_load_store_addr_fn visit;
435 bitmap_clear (work);
436 FOR_EACH_EDGE (e, ei, bb->preds)
437 bitmap_ior_into (work, (bitmap)e->src->aux);
439 visit = visit_op;
441 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
443 gimple stmt = gsi_stmt (gsi);
444 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
446 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
448 gimple stmt = gsi_stmt (gsi);
450 if (gimple_clobber_p (stmt))
452 tree lhs = gimple_assign_lhs (stmt);
453 size_t *v;
454 /* Nested function lowering might introduce LHSs
455 that are COMPONENT_REFs. */
456 if (TREE_CODE (lhs) != VAR_DECL)
457 continue;
458 if (DECL_RTL_IF_SET (lhs) == pc_rtx
459 && (v = decl_to_stack_part->get (lhs)))
460 bitmap_clear_bit (work, *v);
462 else if (!is_gimple_debug (stmt))
464 if (for_conflict
465 && visit == visit_op)
467 /* If this is the first real instruction in this BB we need
468 to add conflicts for everything live at this point now.
469 Unlike classical liveness for named objects we can't
470 rely on seeing a def/use of the names we're interested in.
471 There might merely be indirect loads/stores. We'd not add any
472 conflicts for such partitions. */
473 bitmap_iterator bi;
474 unsigned i;
475 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
477 struct stack_var *a = &stack_vars[i];
478 if (!a->conflicts)
479 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
480 bitmap_ior_into (a->conflicts, work);
482 visit = visit_conflict;
484 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
489 /* Generate stack partition conflicts between all partitions that are
490 simultaneously live. */
492 static void
493 add_scope_conflicts (void)
495 basic_block bb;
496 bool changed;
497 bitmap work = BITMAP_ALLOC (NULL);
498 int *rpo;
499 int n_bbs;
501 /* We approximate the live range of a stack variable by taking the first
502 mention of its name as starting point(s), and by the end-of-scope
503 death clobber added by gimplify as ending point(s) of the range.
504    This overapproximates in the case where we, for instance, moved an
505    address-taken operation upward without also moving a dereference to it upwards.
506    But it's conservatively correct, as a variable can never hold values
507 before its name is mentioned at least once.
509 We then do a mostly classical bitmap liveness algorithm. */
511 FOR_ALL_BB_FN (bb, cfun)
512 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
514 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
515 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
517 changed = true;
518 while (changed)
520 int i;
521 changed = false;
522 for (i = 0; i < n_bbs; i++)
524 bitmap active;
525 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
526 active = (bitmap)bb->aux;
527 add_scope_conflicts_1 (bb, work, false);
528 if (bitmap_ior_into (active, work))
529 changed = true;
533 FOR_EACH_BB_FN (bb, cfun)
534 add_scope_conflicts_1 (bb, work, true);
536 free (rpo);
537 BITMAP_FREE (work);
538 FOR_ALL_BB_FN (bb, cfun)
539 BITMAP_FREE (bb->aux);
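/* Illustrative sketch (added for exposition, not part of cfgexpand.c):
   the shape of the fixpoint loop used by add_scope_conflicts above --
   propagate per-block "active variable" sets along predecessor edges in
   reverse post-order until nothing changes.  Toy CFG, invented names,
   plain unsigned bitmasks instead of GCC bitmaps, and a simplified
   gen/kill view of "first mention" and "clobber"; compiles standalone
   if extracted.  */
#if 0
#include <stdbool.h>

#define EXAMPLE_N_BLOCKS 4

/* Toy CFG: example_preds[b] is a bitmask of the predecessors of block b.  */
static const unsigned example_preds[EXAMPLE_N_BLOCKS] = { 0x0, 0x1, 0x1, 0x6 };
/* Variables first mentioned (gen) and clobbered (kill) in each block.  */
static const unsigned example_gen[EXAMPLE_N_BLOCKS]  = { 0x1, 0x2, 0x4, 0x0 };
static const unsigned example_kill[EXAMPLE_N_BLOCKS] = { 0x0, 0x0, 0x0, 0x2 };

static void
example_scope_liveness (unsigned out[EXAMPLE_N_BLOCKS])
{
  int b, p;
  bool changed = true;

  for (b = 0; b < EXAMPLE_N_BLOCKS; b++)
    out[b] = 0;
  while (changed)
    {
      changed = false;
      /* 0..3 happens to be a reverse post-order of this toy CFG.  */
      for (b = 0; b < EXAMPLE_N_BLOCKS; b++)
        {
          unsigned in = 0, new_out;
          for (p = 0; p < EXAMPLE_N_BLOCKS; p++)
            if (example_preds[b] & (1u << p))
              in |= out[p];
          new_out = (in & ~example_kill[b]) | example_gen[b];
          if (new_out != out[b])
            {
              out[b] = new_out;
              changed = true;
            }
        }
    }
}
#endif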
542 /* A subroutine of partition_stack_vars. A comparison function for qsort,
543 sorting an array of indices by the properties of the object. */
545 static int
546 stack_var_cmp (const void *a, const void *b)
548 size_t ia = *(const size_t *)a;
549 size_t ib = *(const size_t *)b;
550 unsigned int aligna = stack_vars[ia].alignb;
551 unsigned int alignb = stack_vars[ib].alignb;
552 HOST_WIDE_INT sizea = stack_vars[ia].size;
553 HOST_WIDE_INT sizeb = stack_vars[ib].size;
554 tree decla = stack_vars[ia].decl;
555 tree declb = stack_vars[ib].decl;
556 bool largea, largeb;
557 unsigned int uida, uidb;
559 /* Primary compare on "large" alignment. Large comes first. */
560 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
561 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
562 if (largea != largeb)
563 return (int)largeb - (int)largea;
565 /* Secondary compare on size, decreasing */
566 if (sizea > sizeb)
567 return -1;
568 if (sizea < sizeb)
569 return 1;
571 /* Tertiary compare on true alignment, decreasing. */
572 if (aligna < alignb)
573 return -1;
574 if (aligna > alignb)
575 return 1;
577 /* Final compare on ID for sort stability, increasing.
578 Two SSA names are compared by their version, SSA names come before
579 non-SSA names, and two normal decls are compared by their DECL_UID. */
580 if (TREE_CODE (decla) == SSA_NAME)
582 if (TREE_CODE (declb) == SSA_NAME)
583 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
584 else
585 return -1;
587 else if (TREE_CODE (declb) == SSA_NAME)
588 return 1;
589 else
590 uida = DECL_UID (decla), uidb = DECL_UID (declb);
591 if (uida < uidb)
592 return 1;
593 if (uida > uidb)
594 return -1;
595 return 0;
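/* Illustrative sketch (added for exposition, not part of cfgexpand.c):
   the ordering stack_var_cmp above produces, reduced to its first two
   keys ("large" alignment first, then size decreasing) over a toy
   record.  Invented names; compiles standalone if extracted.  */
#if 0
#include <stdlib.h>

struct example_var { int large_align; long size; };

static int
example_var_cmp (const void *pa, const void *pb)
{
  const struct example_var *a = (const struct example_var *) pa;
  const struct example_var *b = (const struct example_var *) pb;

  if (a->large_align != b->large_align)
    return b->large_align - a->large_align;   /* large comes first */
  if (a->size != b->size)
    return a->size > b->size ? -1 : 1;        /* then size, decreasing */
  return 0;
}

static void
example_sort (struct example_var *vars, size_t n)
{
  qsort (vars, n, sizeof (struct example_var), example_var_cmp);
}
#endif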
598 struct part_traits : default_hashmap_traits
600 template<typename T>
601 static bool
602 is_deleted (T &e)
603 { return e.m_value == reinterpret_cast<void *> (1); }
605 template<typename T> static bool is_empty (T &e) { return e.m_value == NULL; }
606 template<typename T>
607 static void
608 mark_deleted (T &e)
609 { e.m_value = reinterpret_cast<T> (1); }
611 template<typename T>
612 static void
613 mark_empty (T &e)
614 { e.m_value = NULL; }
617 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
619 /* If the points-to solution *PI points to variables that are in a partition
620 together with other variables add all partition members to the pointed-to
621 variables bitmap. */
623 static void
624 add_partitioned_vars_to_ptset (struct pt_solution *pt,
625 part_hashmap *decls_to_partitions,
626 hash_set<bitmap> *visited, bitmap temp)
628 bitmap_iterator bi;
629 unsigned i;
630 bitmap *part;
632 if (pt->anything
633 || pt->vars == NULL
634 /* The pointed-to vars bitmap is shared, it is enough to
635 visit it once. */
636 || visited->add (pt->vars))
637 return;
639 bitmap_clear (temp);
641 /* By using a temporary bitmap to store all members of the partitions
642 we have to add we make sure to visit each of the partitions only
643 once. */
644 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
645 if ((!temp
646 || !bitmap_bit_p (temp, i))
647 && (part = decls_to_partitions->get (i)))
648 bitmap_ior_into (temp, *part);
649 if (!bitmap_empty_p (temp))
650 bitmap_ior_into (pt->vars, temp);
653 /* Update points-to sets based on partition info, so we can use them on RTL.
654 The bitmaps representing stack partitions will be saved until expand,
655 where partitioned decls used as bases in memory expressions will be
656 rewritten. */
658 static void
659 update_alias_info_with_stack_vars (void)
661 part_hashmap *decls_to_partitions = NULL;
662 size_t i, j;
663 tree var = NULL_TREE;
665 for (i = 0; i < stack_vars_num; i++)
667 bitmap part = NULL;
668 tree name;
669 struct ptr_info_def *pi;
671       /* Not interested in partitions with a single variable.  */
672 if (stack_vars[i].representative != i
673 || stack_vars[i].next == EOC)
674 continue;
676 if (!decls_to_partitions)
678 decls_to_partitions = new part_hashmap;
679 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
682 /* Create an SSA_NAME that points to the partition for use
683 as base during alias-oracle queries on RTL for bases that
684 have been partitioned. */
685 if (var == NULL_TREE)
686 var = create_tmp_var (ptr_type_node, NULL);
687 name = make_ssa_name (var, NULL);
689 /* Create bitmaps representing partitions. They will be used for
690 points-to sets later, so use GGC alloc. */
691 part = BITMAP_GGC_ALLOC ();
692 for (j = i; j != EOC; j = stack_vars[j].next)
694 tree decl = stack_vars[j].decl;
695 unsigned int uid = DECL_PT_UID (decl);
696 bitmap_set_bit (part, uid);
697 decls_to_partitions->put (uid, part);
698 cfun->gimple_df->decls_to_pointers->put (decl, name);
699 if (TREE_ADDRESSABLE (decl))
700 TREE_ADDRESSABLE (name) = 1;
703 /* Make the SSA name point to all partition members. */
704 pi = get_ptr_info (name);
705 pt_solution_set (&pi->pt, part, false);
708 /* Make all points-to sets that contain one member of a partition
709 contain all members of the partition. */
710 if (decls_to_partitions)
712 unsigned i;
713 hash_set<bitmap> visited;
714 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
716 for (i = 1; i < num_ssa_names; i++)
718 tree name = ssa_name (i);
719 struct ptr_info_def *pi;
721 if (name
722 && POINTER_TYPE_P (TREE_TYPE (name))
723 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
724 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
725 &visited, temp);
728 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
729 decls_to_partitions, &visited, temp);
731 delete decls_to_partitions;
732 BITMAP_FREE (temp);
736 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
737 partitioning algorithm. Partitions A and B are known to be non-conflicting.
738 Merge them into a single partition A. */
740 static void
741 union_stack_vars (size_t a, size_t b)
743 struct stack_var *vb = &stack_vars[b];
744 bitmap_iterator bi;
745 unsigned u;
747 gcc_assert (stack_vars[b].next == EOC);
748 /* Add B to A's partition. */
749 stack_vars[b].next = stack_vars[a].next;
750 stack_vars[b].representative = a;
751 stack_vars[a].next = b;
753 /* Update the required alignment of partition A to account for B. */
754 if (stack_vars[a].alignb < stack_vars[b].alignb)
755 stack_vars[a].alignb = stack_vars[b].alignb;
757 /* Update the interference graph and merge the conflicts. */
758 if (vb->conflicts)
760 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
761 add_stack_var_conflict (a, stack_vars[u].representative);
762 BITMAP_FREE (vb->conflicts);
766 /* A subroutine of expand_used_vars. Binpack the variables into
767 partitions constrained by the interference graph. The overall
768 algorithm used is as follows:
770 Sort the objects by size in descending order.
771 For each object A {
772 S = size(A)
773 O = 0
774 loop {
775 Look for the largest non-conflicting object B with size <= S.
776 UNION (A, B)
781 static void
782 partition_stack_vars (void)
784 size_t si, sj, n = stack_vars_num;
786 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
787 for (si = 0; si < n; ++si)
788 stack_vars_sorted[si] = si;
790 if (n == 1)
791 return;
793 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
795 for (si = 0; si < n; ++si)
797 size_t i = stack_vars_sorted[si];
798 unsigned int ialign = stack_vars[i].alignb;
799 HOST_WIDE_INT isize = stack_vars[i].size;
801 /* Ignore objects that aren't partition representatives. If we
802 see a var that is not a partition representative, it must
803 have been merged earlier. */
804 if (stack_vars[i].representative != i)
805 continue;
807 for (sj = si + 1; sj < n; ++sj)
809 size_t j = stack_vars_sorted[sj];
810 unsigned int jalign = stack_vars[j].alignb;
811 HOST_WIDE_INT jsize = stack_vars[j].size;
813 /* Ignore objects that aren't partition representatives. */
814 if (stack_vars[j].representative != j)
815 continue;
817 /* Do not mix objects of "small" (supported) alignment
818 and "large" (unsupported) alignment. */
819 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
820 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
821 break;
823 /* For Address Sanitizer do not mix objects with different
824 sizes, as the shorter vars wouldn't be adequately protected.
825 Don't do that for "large" (unsupported) alignment objects,
826 those aren't protected anyway. */
827 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
828 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
829 break;
831 /* Ignore conflicting objects. */
832 if (stack_var_conflict_p (i, j))
833 continue;
835 /* UNION the objects, placing J at OFFSET. */
836 union_stack_vars (i, j);
840 update_alias_info_with_stack_vars ();
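/* Illustrative sketch (added for exposition, not part of cfgexpand.c):
   the greedy pairing described in the comment before
   partition_stack_vars, with the sorting, alignment and ASan rules
   stripped down to a conflict callback.  Each representative absorbs
   every later non-conflicting object, splicing it onto its list just as
   union_stack_vars does.  Invented names; compiles standalone if
   extracted.  */
#if 0
#include <stddef.h>

#define EXAMPLE_EOC ((size_t) -1)

struct example_part { size_t representative; size_t next; };

static void
example_partition (struct example_part *v, size_t n,
                   int (*conflict_p) (size_t, size_t))
{
  size_t i, j;

  for (i = 0; i < n; i++)
    {
      v[i].representative = i;
      v[i].next = EXAMPLE_EOC;
    }
  /* V is assumed to be sorted by decreasing size already.  */
  for (i = 0; i < n; i++)
    {
      if (v[i].representative != i)
        continue;                 /* already merged into an earlier object */
      for (j = i + 1; j < n; j++)
        {
          if (v[j].representative != j || conflict_p (i, j))
            continue;
          /* UNION (i, j): splice J onto I's partition list.  */
          v[j].next = v[i].next;
          v[j].representative = i;
          v[i].next = j;
        }
    }
}
#endif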
843 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
845 static void
846 dump_stack_var_partition (void)
848 size_t si, i, j, n = stack_vars_num;
850 for (si = 0; si < n; ++si)
852 i = stack_vars_sorted[si];
854 /* Skip variables that aren't partition representatives, for now. */
855 if (stack_vars[i].representative != i)
856 continue;
858 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
859 " align %u\n", (unsigned long) i, stack_vars[i].size,
860 stack_vars[i].alignb);
862 for (j = i; j != EOC; j = stack_vars[j].next)
864 fputc ('\t', dump_file);
865 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
867 fputc ('\n', dump_file);
871 /* Assign rtl to DECL at BASE + OFFSET. */
873 static void
874 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
875 HOST_WIDE_INT offset)
877 unsigned align;
878 rtx x;
880 /* If this fails, we've overflowed the stack frame. Error nicely? */
881 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
883 x = plus_constant (Pmode, base, offset);
884 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
886 if (TREE_CODE (decl) != SSA_NAME)
888 /* Set alignment we actually gave this decl if it isn't an SSA name.
889 If it is we generate stack slots only accidentally so it isn't as
890        important; we'll simply use the alignment that is already set.  */
891 if (base == virtual_stack_vars_rtx)
892 offset -= frame_phase;
893 align = offset & -offset;
894 align *= BITS_PER_UNIT;
895 if (align == 0 || align > base_align)
896 align = base_align;
898 /* One would think that we could assert that we're not decreasing
899 alignment here, but (at least) the i386 port does exactly this
900 via the MINIMUM_ALIGNMENT hook. */
902 DECL_ALIGN (decl) = align;
903 DECL_USER_ALIGN (decl) = 0;
906 set_mem_attributes (x, SSAVAR (decl), true);
907 set_rtl (decl, x);
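/* Illustrative sketch (added for exposition, not part of cfgexpand.c):
   "offset & -offset" in expand_one_stack_var_at isolates the lowest set
   bit, i.e. the largest power of two dividing the offset, which is the
   alignment the chosen frame slot actually guarantees.  Invented name;
   compiles standalone if extracted (two's complement assumed).  */
#if 0
#include <assert.h>

static void
example_offset_alignment (void)
{
  assert ((48 & -48) == 16);   /* 48 = 16 * 3: slot is 16-byte aligned */
  assert ((40 & -40) == 8);    /* 40 = 8 * 5:  slot is 8-byte aligned  */
  assert ((7 & -7) == 1);      /* odd offsets give only byte alignment */
}
#endif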
910 struct stack_vars_data
912 /* Vector of offset pairs, always end of some padding followed
913 by start of the padding that needs Address Sanitizer protection.
914      The vector is in reverse order; highest offset pairs come first.  */
915 vec<HOST_WIDE_INT> asan_vec;
917 /* Vector of partition representative decls in between the paddings. */
918 vec<tree> asan_decl_vec;
920 /* Base pseudo register for Address Sanitizer protected automatic vars. */
921 rtx asan_base;
923 /* Alignment needed for the Address Sanitizer protected automatic vars. */
924 unsigned int asan_alignb;
927 /* A subroutine of expand_used_vars. Give each partition representative
928 a unique location within the stack frame. Update each partition member
929 with that location. */
931 static void
932 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
934 size_t si, i, j, n = stack_vars_num;
935 HOST_WIDE_INT large_size = 0, large_alloc = 0;
936 rtx large_base = NULL;
937 unsigned large_align = 0;
938 tree decl;
940 /* Determine if there are any variables requiring "large" alignment.
941 Since these are dynamically allocated, we only process these if
942      no predicate is involved.  */
943 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
944 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
946 /* Find the total size of these variables. */
947 for (si = 0; si < n; ++si)
949 unsigned alignb;
951 i = stack_vars_sorted[si];
952 alignb = stack_vars[i].alignb;
954 /* Stop when we get to the first decl with "small" alignment. */
955 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
956 break;
958 /* Skip variables that aren't partition representatives. */
959 if (stack_vars[i].representative != i)
960 continue;
962 /* Skip variables that have already had rtl assigned. See also
963 add_stack_var where we perpetrate this pc_rtx hack. */
964 decl = stack_vars[i].decl;
965 if ((TREE_CODE (decl) == SSA_NAME
966 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
967 : DECL_RTL (decl)) != pc_rtx)
968 continue;
970 large_size += alignb - 1;
971 large_size &= -(HOST_WIDE_INT)alignb;
972 large_size += stack_vars[i].size;
975 /* If there were any, allocate space. */
976 if (large_size > 0)
977 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
978 large_align, true);
981 for (si = 0; si < n; ++si)
983 rtx base;
984 unsigned base_align, alignb;
985 HOST_WIDE_INT offset;
987 i = stack_vars_sorted[si];
989 /* Skip variables that aren't partition representatives, for now. */
990 if (stack_vars[i].representative != i)
991 continue;
993 /* Skip variables that have already had rtl assigned. See also
994 add_stack_var where we perpetrate this pc_rtx hack. */
995 decl = stack_vars[i].decl;
996 if ((TREE_CODE (decl) == SSA_NAME
997 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
998 : DECL_RTL (decl)) != pc_rtx)
999 continue;
1001 /* Check the predicate to see whether this variable should be
1002 allocated in this pass. */
1003 if (pred && !pred (i))
1004 continue;
1006 alignb = stack_vars[i].alignb;
1007 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1009 base = virtual_stack_vars_rtx;
1010 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
1012 HOST_WIDE_INT prev_offset = frame_offset;
1013 tree repr_decl = NULL_TREE;
1015 offset
1016 = alloc_stack_frame_space (stack_vars[i].size
1017 + ASAN_RED_ZONE_SIZE,
1018 MAX (alignb, ASAN_RED_ZONE_SIZE));
1019 data->asan_vec.safe_push (prev_offset);
1020 data->asan_vec.safe_push (offset + stack_vars[i].size);
1021 /* Find best representative of the partition.
1022 Prefer those with DECL_NAME, even better
1023 satisfying asan_protect_stack_decl predicate. */
1024 for (j = i; j != EOC; j = stack_vars[j].next)
1025 if (asan_protect_stack_decl (stack_vars[j].decl)
1026 && DECL_NAME (stack_vars[j].decl))
1028 repr_decl = stack_vars[j].decl;
1029 break;
1031 else if (repr_decl == NULL_TREE
1032 && DECL_P (stack_vars[j].decl)
1033 && DECL_NAME (stack_vars[j].decl))
1034 repr_decl = stack_vars[j].decl;
1035 if (repr_decl == NULL_TREE)
1036 repr_decl = stack_vars[i].decl;
1037 data->asan_decl_vec.safe_push (repr_decl);
1038 data->asan_alignb = MAX (data->asan_alignb, alignb);
1039 if (data->asan_base == NULL)
1040 data->asan_base = gen_reg_rtx (Pmode);
1041 base = data->asan_base;
1043 if (!STRICT_ALIGNMENT)
1044 base_align = crtl->max_used_stack_slot_alignment;
1045 else
1046 base_align = MAX (crtl->max_used_stack_slot_alignment,
1047 GET_MODE_ALIGNMENT (SImode)
1048 << ASAN_SHADOW_SHIFT);
1050 else
1052 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1053 base_align = crtl->max_used_stack_slot_alignment;
1056 else
1058 /* Large alignment is only processed in the last pass. */
1059 if (pred)
1060 continue;
1061 gcc_assert (large_base != NULL);
1063 large_alloc += alignb - 1;
1064 large_alloc &= -(HOST_WIDE_INT)alignb;
1065 offset = large_alloc;
1066 large_alloc += stack_vars[i].size;
1068 base = large_base;
1069 base_align = large_align;
1072 /* Create rtl for each variable based on their location within the
1073 partition. */
1074 for (j = i; j != EOC; j = stack_vars[j].next)
1076 expand_one_stack_var_at (stack_vars[j].decl,
1077 base, base_align,
1078 offset);
1082 gcc_assert (large_alloc == large_size);
1085 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1086 static HOST_WIDE_INT
1087 account_stack_vars (void)
1089 size_t si, j, i, n = stack_vars_num;
1090 HOST_WIDE_INT size = 0;
1092 for (si = 0; si < n; ++si)
1094 i = stack_vars_sorted[si];
1096 /* Skip variables that aren't partition representatives, for now. */
1097 if (stack_vars[i].representative != i)
1098 continue;
1100 size += stack_vars[i].size;
1101 for (j = i; j != EOC; j = stack_vars[j].next)
1102 set_rtl (stack_vars[j].decl, NULL);
1104 return size;
1107 /* A subroutine of expand_one_var. Called to immediately assign rtl
1108 to a variable to be allocated in the stack frame. */
1110 static void
1111 expand_one_stack_var (tree var)
1113 HOST_WIDE_INT size, offset;
1114 unsigned byte_align;
1116 size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
1117 byte_align = align_local_variable (SSAVAR (var));
1119 /* We handle highly aligned variables in expand_stack_vars. */
1120 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1122 offset = alloc_stack_frame_space (size, byte_align);
1124 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1125 crtl->max_used_stack_slot_alignment, offset);
1128 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1129 that will reside in a hard register. */
1131 static void
1132 expand_one_hard_reg_var (tree var)
1134 rest_of_decl_compilation (var, 0, 0);
1137 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1138 that will reside in a pseudo register. */
1140 static void
1141 expand_one_register_var (tree var)
1143 tree decl = SSAVAR (var);
1144 tree type = TREE_TYPE (decl);
1145 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
1146 rtx x = gen_reg_rtx (reg_mode);
1148 set_rtl (var, x);
1150 /* Note if the object is a user variable. */
1151 if (!DECL_ARTIFICIAL (decl))
1152 mark_user_reg (x);
1154 if (POINTER_TYPE_P (type))
1155 mark_reg_pointer (x, get_pointer_alignment (var));
1158 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1159 has some associated error, e.g. its type is error-mark. We just need
1160 to pick something that won't crash the rest of the compiler. */
1162 static void
1163 expand_one_error_var (tree var)
1165 enum machine_mode mode = DECL_MODE (var);
1166 rtx x;
1168 if (mode == BLKmode)
1169 x = gen_rtx_MEM (BLKmode, const0_rtx);
1170 else if (mode == VOIDmode)
1171 x = const0_rtx;
1172 else
1173 x = gen_reg_rtx (mode);
1175 SET_DECL_RTL (var, x);
1178 /* A subroutine of expand_one_var. VAR is a variable that will be
1179 allocated to the local stack frame. Return true if we wish to
1180 add VAR to STACK_VARS so that it will be coalesced with other
1181 variables. Return false to allocate VAR immediately.
1183 This function is used to reduce the number of variables considered
1184 for coalescing, which reduces the size of the quadratic problem. */
1186 static bool
1187 defer_stack_allocation (tree var, bool toplevel)
1189 /* Whether the variable is small enough for immediate allocation not to be
1190 a problem with regard to the frame size. */
1191 bool smallish
1192 = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
1193 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1195 /* If stack protection is enabled, *all* stack variables must be deferred,
1196 so that we can re-order the strings to the top of the frame.
1197 Similarly for Address Sanitizer. */
1198 if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
1199 return true;
1201 /* We handle "large" alignment via dynamic allocation. We want to handle
1202 this extra complication in only one place, so defer them. */
1203 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1204 return true;
1206   /* When optimization is enabled, DECL_IGNORED_P variables that were originally
1207      scoped might be detached from their block and appear at toplevel when we reach
1208 here. We want to coalesce them with variables from other blocks when
1209 the immediate contribution to the frame size would be noticeable. */
1210 if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
1211 return true;
1213 /* Variables declared in the outermost scope automatically conflict
1214 with every other variable. The only reason to want to defer them
1215 at all is that, after sorting, we can more efficiently pack
1216 small variables in the stack frame. Continue to defer at -O2. */
1217 if (toplevel && optimize < 2)
1218 return false;
1220 /* Without optimization, *most* variables are allocated from the
1221 stack, which makes the quadratic problem large exactly when we
1222 want compilation to proceed as quickly as possible. On the
1223 other hand, we don't want the function's stack frame size to
1224 get completely out of hand. So we avoid adding scalars and
1225 "small" aggregates to the list at all. */
1226 if (optimize == 0 && smallish)
1227 return false;
1229 return true;
1232 /* A subroutine of expand_used_vars. Expand one variable according to
1233 its flavor. Variables to be placed on the stack are not actually
1234 expanded yet, merely recorded.
1235 When REALLY_EXPAND is false, only add stack values to be allocated.
1236 Return stack usage this variable is supposed to take.
1239 static HOST_WIDE_INT
1240 expand_one_var (tree var, bool toplevel, bool really_expand)
1242 unsigned int align = BITS_PER_UNIT;
1243 tree origvar = var;
1245 var = SSAVAR (var);
1247 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1249 /* Because we don't know if VAR will be in register or on stack,
1250 we conservatively assume it will be on stack even if VAR is
1251 eventually put into register after RA pass. For non-automatic
1252 variables, which won't be on stack, we collect alignment of
1253 type and ignore user specified alignment. Similarly for
1254 SSA_NAMEs for which use_register_for_decl returns true. */
1255 if (TREE_STATIC (var)
1256 || DECL_EXTERNAL (var)
1257 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1258 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1259 TYPE_MODE (TREE_TYPE (var)),
1260 TYPE_ALIGN (TREE_TYPE (var)));
1261 else if (DECL_HAS_VALUE_EXPR_P (var)
1262 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1263 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1264 or variables which were assigned a stack slot already by
1265 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1266 changed from the offset chosen to it. */
1267 align = crtl->stack_alignment_estimated;
1268 else
1269 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1271       /* If the variable alignment is very large we'll dynamically allocate
1272 it, which means that in-frame portion is just a pointer. */
1273 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1274 align = POINTER_SIZE;
1277 if (SUPPORTS_STACK_ALIGNMENT
1278 && crtl->stack_alignment_estimated < align)
1280 /* stack_alignment_estimated shouldn't change after stack
1281 realign decision made */
1282 gcc_assert (!crtl->stack_realign_processed);
1283 crtl->stack_alignment_estimated = align;
1286 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1287 So here we only make sure stack_alignment_needed >= align. */
1288 if (crtl->stack_alignment_needed < align)
1289 crtl->stack_alignment_needed = align;
1290 if (crtl->max_used_stack_slot_alignment < align)
1291 crtl->max_used_stack_slot_alignment = align;
1293 if (TREE_CODE (origvar) == SSA_NAME)
1295 gcc_assert (TREE_CODE (var) != VAR_DECL
1296 || (!DECL_EXTERNAL (var)
1297 && !DECL_HAS_VALUE_EXPR_P (var)
1298 && !TREE_STATIC (var)
1299 && TREE_TYPE (var) != error_mark_node
1300 && !DECL_HARD_REGISTER (var)
1301 && really_expand));
1303 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1305 else if (DECL_EXTERNAL (var))
1307 else if (DECL_HAS_VALUE_EXPR_P (var))
1309 else if (TREE_STATIC (var))
1311 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1313 else if (TREE_TYPE (var) == error_mark_node)
1315 if (really_expand)
1316 expand_one_error_var (var);
1318 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1320 if (really_expand)
1322 expand_one_hard_reg_var (var);
1323 if (!DECL_HARD_REGISTER (var))
1324 /* Invalid register specification. */
1325 expand_one_error_var (var);
1328 else if (use_register_for_decl (var))
1330 if (really_expand)
1331 expand_one_register_var (origvar);
1333 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1335 /* Reject variables which cover more than half of the address-space. */
1336 if (really_expand)
1338 error ("size of variable %q+D is too large", var);
1339 expand_one_error_var (var);
1342 else if (defer_stack_allocation (var, toplevel))
1343 add_stack_var (origvar);
1344 else
1346 if (really_expand)
1347 expand_one_stack_var (origvar);
1348 return tree_to_uhwi (DECL_SIZE_UNIT (var));
1350 return 0;
1353 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1354 expanding variables. Those variables that can be put into registers
1355 are allocated pseudos; those that can't are put on the stack.
1357 TOPLEVEL is true if this is the outermost BLOCK. */
1359 static void
1360 expand_used_vars_for_block (tree block, bool toplevel)
1362 tree t;
1364 /* Expand all variables at this level. */
1365 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1366 if (TREE_USED (t)
1367 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1368 || !DECL_NONSHAREABLE (t)))
1369 expand_one_var (t, toplevel, true);
1371 /* Expand all variables at containing levels. */
1372 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1373 expand_used_vars_for_block (t, false);
1376 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1377 and clear TREE_USED on all local variables. */
1379 static void
1380 clear_tree_used (tree block)
1382 tree t;
1384 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1385 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1386 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1387 || !DECL_NONSHAREABLE (t))
1388 TREE_USED (t) = 0;
1390 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1391 clear_tree_used (t);
1394 enum {
1395 SPCT_FLAG_DEFAULT = 1,
1396 SPCT_FLAG_ALL = 2,
1397 SPCT_FLAG_STRONG = 3
1400 /* Examine TYPE and determine a bit mask of the following features. */
1402 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1403 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1404 #define SPCT_HAS_ARRAY 4
1405 #define SPCT_HAS_AGGREGATE 8
1407 static unsigned int
1408 stack_protect_classify_type (tree type)
1410 unsigned int ret = 0;
1411 tree t;
1413 switch (TREE_CODE (type))
1415 case ARRAY_TYPE:
1416 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1417 if (t == char_type_node
1418 || t == signed_char_type_node
1419 || t == unsigned_char_type_node)
1421 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1422 unsigned HOST_WIDE_INT len;
1424 if (!TYPE_SIZE_UNIT (type)
1425 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1426 len = max;
1427 else
1428 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1430 if (len < max)
1431 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1432 else
1433 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1435 else
1436 ret = SPCT_HAS_ARRAY;
1437 break;
1439 case UNION_TYPE:
1440 case QUAL_UNION_TYPE:
1441 case RECORD_TYPE:
1442 ret = SPCT_HAS_AGGREGATE;
1443 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1444 if (TREE_CODE (t) == FIELD_DECL)
1445 ret |= stack_protect_classify_type (TREE_TYPE (t));
1446 break;
1448 default:
1449 break;
1452 return ret;
1455 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1456 part of the local stack frame. Remember if we ever return nonzero for
1457 any variable in this function. The return value is the phase number in
1458 which the variable should be allocated. */
1460 static int
1461 stack_protect_decl_phase (tree decl)
1463 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1464 int ret = 0;
1466 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1467 has_short_buffer = true;
1469 if (flag_stack_protect == SPCT_FLAG_ALL
1470 || flag_stack_protect == SPCT_FLAG_STRONG)
1472 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1473 && !(bits & SPCT_HAS_AGGREGATE))
1474 ret = 1;
1475 else if (bits & SPCT_HAS_ARRAY)
1476 ret = 2;
1478 else
1479 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1481 if (ret)
1482 has_protected_decls = true;
1484 return ret;
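/* Illustrative sketch (added for exposition, not part of cfgexpand.c):
   the phase decision implemented by stack_protect_decl_phase above,
   restated over the classification bits as a standalone table.  The
   SPCT_HAS_* values are the #defines earlier in this file; the function
   name and the boolean parameter are invented for the example.  */
#if 0
static int
example_protect_phase (unsigned bits, int protect_all_or_strong)
{
  if (protect_all_or_strong)
    {
      /* Bare character arrays go closest to the guard (phase 1),
         any other array-containing object next (phase 2).  */
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        return 1;
      if (bits & SPCT_HAS_ARRAY)
        return 2;
      return 0;
    }
  /* The default mode only segregates large character arrays.  */
  return (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
}
#endif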
1487 /* Two helper routines that check for phase 1 and phase 2. These are used
1488 as callbacks for expand_stack_vars. */
1490 static bool
1491 stack_protect_decl_phase_1 (size_t i)
1493 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1496 static bool
1497 stack_protect_decl_phase_2 (size_t i)
1499 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1502 /* And helper function that checks for asan phase (with stack protector
1503 it is phase 3). This is used as callback for expand_stack_vars.
1504 Returns true if any of the vars in the partition need to be protected. */
1506 static bool
1507 asan_decl_phase_3 (size_t i)
1509 while (i != EOC)
1511 if (asan_protect_stack_decl (stack_vars[i].decl))
1512 return true;
1513 i = stack_vars[i].next;
1515 return false;
1518 /* Ensure that variables in different stack protection phases conflict
1519 so that they are not merged and share the same stack slot. */
1521 static void
1522 add_stack_protection_conflicts (void)
1524 size_t i, j, n = stack_vars_num;
1525 unsigned char *phase;
1527 phase = XNEWVEC (unsigned char, n);
1528 for (i = 0; i < n; ++i)
1529 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1531 for (i = 0; i < n; ++i)
1533 unsigned char ph_i = phase[i];
1534 for (j = i + 1; j < n; ++j)
1535 if (ph_i != phase[j])
1536 add_stack_var_conflict (i, j);
1539 XDELETEVEC (phase);
1542 /* Create a decl for the guard at the top of the stack frame. */
1544 static void
1545 create_stack_guard (void)
1547 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1548 VAR_DECL, NULL, ptr_type_node);
1549 TREE_THIS_VOLATILE (guard) = 1;
1550 TREE_USED (guard) = 1;
1551 expand_one_stack_var (guard);
1552 crtl->stack_protect_guard = guard;
1555 /* Prepare for expanding variables. */
1556 static void
1557 init_vars_expansion (void)
1559 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1560 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1562 /* A map from decl to stack partition. */
1563 decl_to_stack_part = new hash_map<tree, size_t>;
1565 /* Initialize local stack smashing state. */
1566 has_protected_decls = false;
1567 has_short_buffer = false;
1570 /* Free up stack variable graph data. */
1571 static void
1572 fini_vars_expansion (void)
1574 bitmap_obstack_release (&stack_var_bitmap_obstack);
1575 if (stack_vars)
1576 XDELETEVEC (stack_vars);
1577 if (stack_vars_sorted)
1578 XDELETEVEC (stack_vars_sorted);
1579 stack_vars = NULL;
1580 stack_vars_sorted = NULL;
1581 stack_vars_alloc = stack_vars_num = 0;
1582 delete decl_to_stack_part;
1583 decl_to_stack_part = NULL;
1586 /* Make a fair guess for the size of the stack frame of the function
1587 in NODE. This doesn't have to be exact, the result is only used in
1588 the inline heuristics. So we don't want to run the full stack var
1589 packing algorithm (which is quadratic in the number of stack vars).
1590 Instead, we calculate the total size of all stack vars. This turns
1591 out to be a pretty fair estimate -- packing of stack vars doesn't
1592 happen very often. */
1594 HOST_WIDE_INT
1595 estimated_stack_frame_size (struct cgraph_node *node)
1597 HOST_WIDE_INT size = 0;
1598 size_t i;
1599 tree var;
1600 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1602 push_cfun (fn);
1604 init_vars_expansion ();
1606 FOR_EACH_LOCAL_DECL (fn, i, var)
1607 if (auto_var_in_fn_p (var, fn->decl))
1608 size += expand_one_var (var, true, false);
1610 if (stack_vars_num > 0)
1612 /* Fake sorting the stack vars for account_stack_vars (). */
1613 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1614 for (i = 0; i < stack_vars_num; ++i)
1615 stack_vars_sorted[i] = i;
1616 size += account_stack_vars ();
1619 fini_vars_expansion ();
1620 pop_cfun ();
1621 return size;
1624 /* Helper routine to check if a record or union contains an array field. */
1626 static int
1627 record_or_union_type_has_array_p (const_tree tree_type)
1629 tree fields = TYPE_FIELDS (tree_type);
1630 tree f;
1632 for (f = fields; f; f = DECL_CHAIN (f))
1633 if (TREE_CODE (f) == FIELD_DECL)
1635 tree field_type = TREE_TYPE (f);
1636 if (RECORD_OR_UNION_TYPE_P (field_type)
1637 && record_or_union_type_has_array_p (field_type))
1638 return 1;
1639 if (TREE_CODE (field_type) == ARRAY_TYPE)
1640 return 1;
1642 return 0;
1645 /* Check if the current function has local referenced variables that
1646 have their addresses taken, contain an array, or are arrays. */
1648 static bool
1649 stack_protect_decl_p ()
1651 unsigned i;
1652 tree var;
1654 FOR_EACH_LOCAL_DECL (cfun, i, var)
1655 if (!is_global_var (var))
1657 tree var_type = TREE_TYPE (var);
1658 if (TREE_CODE (var) == VAR_DECL
1659 && (TREE_CODE (var_type) == ARRAY_TYPE
1660 || TREE_ADDRESSABLE (var)
1661 || (RECORD_OR_UNION_TYPE_P (var_type)
1662 && record_or_union_type_has_array_p (var_type))))
1663 return true;
1665 return false;
1668 /* Check if the current function has calls that use a return slot. */
1670 static bool
1671 stack_protect_return_slot_p ()
1673 basic_block bb;
1675 FOR_ALL_BB_FN (bb, cfun)
1676 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
1677 !gsi_end_p (gsi); gsi_next (&gsi))
1679 gimple stmt = gsi_stmt (gsi);
1680 /* This assumes that calls to internal-only functions never
1681 use a return slot. */
1682 if (is_gimple_call (stmt)
1683 && !gimple_call_internal_p (stmt)
1684 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
1685 gimple_call_fndecl (stmt)))
1686 return true;
1688 return false;
1691 /* Expand all variables used in the function. */
1693 static rtx_insn *
1694 expand_used_vars (void)
1696 tree var, outer_block = DECL_INITIAL (current_function_decl);
1697 vec<tree> maybe_local_decls = vNULL;
1698 rtx_insn *var_end_seq = NULL;
1699 unsigned i;
1700 unsigned len;
1701 bool gen_stack_protect_signal = false;
1703 /* Compute the phase of the stack frame for this function. */
1705 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1706 int off = STARTING_FRAME_OFFSET % align;
1707 frame_phase = off ? align - off : 0;
1710 /* Set TREE_USED on all variables in the local_decls. */
1711 FOR_EACH_LOCAL_DECL (cfun, i, var)
1712 TREE_USED (var) = 1;
1713 /* Clear TREE_USED on all variables associated with a block scope. */
1714 clear_tree_used (DECL_INITIAL (current_function_decl));
1716 init_vars_expansion ();
1718 hash_map<tree, tree> ssa_name_decls;
1719 for (i = 0; i < SA.map->num_partitions; i++)
1721 tree var = partition_to_var (SA.map, i);
1723 gcc_assert (!virtual_operand_p (var));
1725 /* Assign decls to each SSA name partition, share decls for partitions
1726 we could have coalesced (those with the same type). */
1727 if (SSA_NAME_VAR (var) == NULL_TREE)
1729 tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
1730 if (!*slot)
1731 *slot = create_tmp_reg (TREE_TYPE (var), NULL);
1732 replace_ssa_name_symbol (var, *slot);
1735 /* Always allocate space for partitions based on VAR_DECLs. But for
1736 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1737 debug info, there is no need to do so if optimization is disabled
1738 because all the SSA_NAMEs based on these DECLs have been coalesced
1739 into a single partition, which is thus assigned the canonical RTL
1740 location of the DECLs. If in_lto_p, we can't rely on optimize,
1741 a function could be compiled with -O1 -flto first and only the
1742 link performed at -O0. */
1743 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1744 expand_one_var (var, true, true);
1745 else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
1747 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1748 contain the default def (representing the parm or result itself)
1749 we don't do anything here. But those which don't contain the
1750 default def (representing a temporary based on the parm/result)
1751 we need to allocate space just like for normal VAR_DECLs. */
1752 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1754 expand_one_var (var, true, true);
1755 gcc_assert (SA.partition_to_pseudo[i]);
1760 if (flag_stack_protect == SPCT_FLAG_STRONG)
1761 gen_stack_protect_signal
1762 = stack_protect_decl_p () || stack_protect_return_slot_p ();
1764 /* At this point all variables on the local_decls with TREE_USED
1765 set are not associated with any block scope. Lay them out. */
1767 len = vec_safe_length (cfun->local_decls);
1768 FOR_EACH_LOCAL_DECL (cfun, i, var)
1770 bool expand_now = false;
1772 /* Expanded above already. */
1773 if (is_gimple_reg (var))
1775 TREE_USED (var) = 0;
1776 goto next;
1778 /* We didn't set a block for static or extern because it's hard
1779 to tell the difference between a global variable (re)declared
1780 in a local scope, and one that's really declared there to
1781 begin with. And it doesn't really matter much, since we're
1782 not giving them stack space. Expand them now. */
1783 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1784 expand_now = true;
1786 /* Expand variables not associated with any block now. Those created by
1787 the optimizers could be live anywhere in the function. Those that
1788 could possibly have been scoped originally and detached from their
1789 block will have their allocation deferred so we coalesce them with
1790 others when optimization is enabled. */
1791 else if (TREE_USED (var))
1792 expand_now = true;
1794 /* Finally, mark all variables on the list as used. We'll use
1795 this in a moment when we expand those associated with scopes. */
1796 TREE_USED (var) = 1;
1798 if (expand_now)
1799 expand_one_var (var, true, true);
1801 next:
1802 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1804 rtx rtl = DECL_RTL_IF_SET (var);
1806 /* Keep artificial non-ignored vars in cfun->local_decls
1807 chain until instantiate_decls. */
1808 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1809 add_local_decl (cfun, var);
1810 else if (rtl == NULL_RTX)
1811 /* If rtl isn't set yet, which can happen e.g. with
1812 -fstack-protector, retry before returning from this
1813 function. */
1814 maybe_local_decls.safe_push (var);
1818 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1820 +-----------------+-----------------+
1821 | ...processed... | ...duplicates...|
1822 +-----------------+-----------------+
1824 +-- LEN points here.
1826 We just want the duplicates, as those are the artificial
1827 non-ignored vars that we want to keep until instantiate_decls.
1828 Move them down and truncate the array. */
1829 if (!vec_safe_is_empty (cfun->local_decls))
1830 cfun->local_decls->block_remove (0, len);
1832 /* At this point, all variables within the block tree with TREE_USED
1833 set are actually used by the optimized function. Lay them out. */
1834 expand_used_vars_for_block (outer_block, true);
1836 if (stack_vars_num > 0)
1838 add_scope_conflicts ();
1840 /* If stack protection is enabled, we don't share space between
1841 vulnerable data and non-vulnerable data. */
1842 if (flag_stack_protect)
1843 add_stack_protection_conflicts ();
1845 /* Now that we have collected all stack variables, and have computed a
1846 minimal interference graph, attempt to save some stack space. */
1847 partition_stack_vars ();
1848 if (dump_file)
1849 dump_stack_var_partition ();
1852 switch (flag_stack_protect)
1854 case SPCT_FLAG_ALL:
1855 create_stack_guard ();
1856 break;
1858 case SPCT_FLAG_STRONG:
1859 if (gen_stack_protect_signal
1860 || cfun->calls_alloca || has_protected_decls)
1861 create_stack_guard ();
1862 break;
1864 case SPCT_FLAG_DEFAULT:
1865 if (cfun->calls_alloca || has_protected_decls)
1866 create_stack_guard ();
1867 break;
1869 default:
1873 /* Assign rtl to each variable based on these partitions. */
1874 if (stack_vars_num > 0)
1876 struct stack_vars_data data;
1878 data.asan_vec = vNULL;
1879 data.asan_decl_vec = vNULL;
1880 data.asan_base = NULL_RTX;
1881 data.asan_alignb = 0;
1883 /* Reorder decls to be protected by iterating over the variables
1884 array multiple times, and allocating out of each phase in turn. */
1885 /* ??? We could probably integrate this into the qsort we did
1886 earlier, such that we naturally see these variables first,
1887 and thus naturally allocate things in the right order. */
1888 if (has_protected_decls)
1890 /* Phase 1 contains only character arrays. */
1891 expand_stack_vars (stack_protect_decl_phase_1, &data);
1893 /* Phase 2 contains other kinds of arrays. */
1894 if (flag_stack_protect == 2)
1895 expand_stack_vars (stack_protect_decl_phase_2, &data);
1898 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
1899 /* Phase 3, any partitions that need asan protection
1900 in addition to phase 1 and 2. */
1901 expand_stack_vars (asan_decl_phase_3, &data);
1903 if (!data.asan_vec.is_empty ())
1905 HOST_WIDE_INT prev_offset = frame_offset;
1906 HOST_WIDE_INT offset, sz, redzonesz;
1907 redzonesz = ASAN_RED_ZONE_SIZE;
1908 sz = data.asan_vec[0] - prev_offset;
1909 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
1910 && data.asan_alignb <= 4096
1911 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
1912 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
1913 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
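/* Worked example, assuming ASAN_RED_ZONE_SIZE is 32: with sz == 40
   and data.asan_alignb == 64 the test above succeeds and
   redzonesz = ((40 + 32 + 63) & ~63) - 40 = 128 - 40 = 88, so
   sz + redzonesz == 128, a multiple of the requested 64-byte
   alignment.  */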
1914 offset
1915 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
1916 data.asan_vec.safe_push (prev_offset);
1917 data.asan_vec.safe_push (offset);
1918 /* Leave space for alignment if STRICT_ALIGNMENT. */
1919 if (STRICT_ALIGNMENT)
1920 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
1921 << ASAN_SHADOW_SHIFT)
1922 / BITS_PER_UNIT, 1);
1924 var_end_seq
1925 = asan_emit_stack_protection (virtual_stack_vars_rtx,
1926 data.asan_base,
1927 data.asan_alignb,
1928 data.asan_vec.address (),
1929 data.asan_decl_vec.address (),
1930 data.asan_vec.length ());
1933 expand_stack_vars (NULL, &data);
1935 data.asan_vec.release ();
1936 data.asan_decl_vec.release ();
1939 fini_vars_expansion ();
1941 /* If there were any artificial non-ignored vars without rtl
1942 found earlier, see whether deferred stack allocation has assigned
1943 rtl to them in the meantime.  */
1944 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
1946 rtx rtl = DECL_RTL_IF_SET (var);
1948 /* Keep artificial non-ignored vars in cfun->local_decls
1949 chain until instantiate_decls. */
1950 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1951 add_local_decl (cfun, var);
1953 maybe_local_decls.release ();
1955 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1956 if (STACK_ALIGNMENT_NEEDED)
1958 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1959 if (!FRAME_GROWS_DOWNWARD)
1960 frame_offset += align - 1;
1961 frame_offset &= -align;
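/* For instance, with a 16-byte boundary a downward-growing frame at
   frame_offset == -72 becomes -72 & -16 == -80, and an upward-growing
   one at 72 becomes (72 + 15) & -16 == 80; in both cases the frame is
   padded up to a multiple of ALIGN.  */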
1964 return var_end_seq;
1968 /* If we need to produce a detailed dump, print the tree representation
1969 for STMT to the dump file. SINCE is the last RTX after which the RTL
1970 generated for STMT should have been appended. */
1972 static void
1973 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
1975 if (dump_file && (dump_flags & TDF_DETAILS))
1977 fprintf (dump_file, "\n;; ");
1978 print_gimple_stmt (dump_file, stmt, 0,
1979 TDF_SLIM | (dump_flags & TDF_LINENO));
1980 fprintf (dump_file, "\n");
1982 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1986 /* Maps the blocks that do not contain tree labels to rtx labels. */
1988 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
1990 /* Returns the label_rtx expression for a label starting basic block BB. */
1992 static rtx
1993 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1995 gimple_stmt_iterator gsi;
1996 tree lab;
1997 gimple lab_stmt;
1999 if (bb->flags & BB_RTL)
2000 return block_label (bb);
2002 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2003 if (elt)
2004 return *elt;
2006 /* Find the tree label if it is present. */
2008 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2010 lab_stmt = gsi_stmt (gsi);
2011 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
2012 break;
2014 lab = gimple_label_label (lab_stmt);
2015 if (DECL_NONLOCAL (lab))
2016 break;
2018 return label_rtx (lab);
2021 rtx_code_label *l = gen_label_rtx ();
2022 lab_rtx_for_bb->put (bb, l);
2023 return l;
2027 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2028 of a basic block where we just expanded the conditional at the end,
2029 possibly clean up the CFG and instruction sequence. LAST is the
2030 last instruction before the just emitted jump sequence. */
2032 static void
2033 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2035 /* Special case: when jumpif decides that the condition is
2036 trivial it emits an unconditional jump (and the necessary
2037 barrier).  But we still have two edges, and the fallthru one is
2038 wrong. purge_dead_edges would clean this up later. Unfortunately
2039 we have to insert insns (and split edges) before
2040 find_many_sub_basic_blocks and hence before purge_dead_edges.
2041 But splitting edges might create new blocks which depend on the
2042 fact that if there are two edges there's no barrier. So the
2043 barrier would get lost and verify_flow_info would ICE. Instead
2044 of auditing all edge splitters to care for the barrier (which
2045 normally isn't there in a cleaned CFG), fix it here. */
2046 if (BARRIER_P (get_last_insn ()))
2048 rtx_insn *insn;
2049 remove_edge (e);
2050 /* Now we have a single successor block.  If we have insns to
2051 insert on the remaining edge, we will potentially insert
2052 them at the end of this block (if the dest block isn't feasible)
2053 in order to avoid splitting the edge. This insertion will take
2054 place in front of the last jump. But we might have emitted
2055 multiple jumps (conditional and one unconditional) to the
2056 same destination. Inserting in front of the last one then
2057 is a problem. See PR 40021. We fix this by deleting all
2058 jumps except the last unconditional one. */
2059 insn = PREV_INSN (get_last_insn ());
2060 /* Make sure we have an unconditional jump. Otherwise we're
2061 confused. */
2062 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2063 for (insn = PREV_INSN (insn); insn != last;)
2065 insn = PREV_INSN (insn);
2066 if (JUMP_P (NEXT_INSN (insn)))
2068 if (!any_condjump_p (NEXT_INSN (insn)))
2070 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2071 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2073 delete_insn (NEXT_INSN (insn));
2079 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2080 Returns a new basic block if we've terminated the current basic
2081 block and created a new one. */
2083 static basic_block
2084 expand_gimple_cond (basic_block bb, gimple stmt)
2086 basic_block new_bb, dest;
2087 edge new_edge;
2088 edge true_edge;
2089 edge false_edge;
2090 rtx_insn *last2, *last;
2091 enum tree_code code;
2092 tree op0, op1;
2094 code = gimple_cond_code (stmt);
2095 op0 = gimple_cond_lhs (stmt);
2096 op1 = gimple_cond_rhs (stmt);
2097 /* We're sometimes presented with such code:
2098 D.123_1 = x < y;
2099 if (D.123_1 != 0)
2101 This would expand to two comparisons which then later might
2102 be cleaned up by combine. But some pattern matchers like if-conversion
2103 work better when there's only one compare, so make up for this
2104 here as a special exception if TER would have made the same change.  */
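/* In the example above this means we pick up the defining comparison
   and emit a single compare-and-branch on x < y instead of first
   materializing D.123_1.  */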
2105 if (SA.values
2106 && TREE_CODE (op0) == SSA_NAME
2107 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2108 && TREE_CODE (op1) == INTEGER_CST
2109 && ((gimple_cond_code (stmt) == NE_EXPR
2110 && integer_zerop (op1))
2111 || (gimple_cond_code (stmt) == EQ_EXPR
2112 && integer_onep (op1)))
2113 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2115 gimple second = SSA_NAME_DEF_STMT (op0);
2116 if (gimple_code (second) == GIMPLE_ASSIGN)
2118 enum tree_code code2 = gimple_assign_rhs_code (second);
2119 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2121 code = code2;
2122 op0 = gimple_assign_rhs1 (second);
2123 op1 = gimple_assign_rhs2 (second);
2125 /* If jumps are cheap turn some more codes into
2126 jumpy sequences. */
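/* E.g. for single-bit operands "if (a & b)" can be turned into the
   short-circuit form "if (a) if (b)", replacing the AND with a pair
   of cheap conditional jumps.  */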
2127 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
2129 if ((code2 == BIT_AND_EXPR
2130 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2131 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2132 || code2 == TRUTH_AND_EXPR)
2134 code = TRUTH_ANDIF_EXPR;
2135 op0 = gimple_assign_rhs1 (second);
2136 op1 = gimple_assign_rhs2 (second);
2138 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2140 code = TRUTH_ORIF_EXPR;
2141 op0 = gimple_assign_rhs1 (second);
2142 op1 = gimple_assign_rhs2 (second);
2148 last2 = last = get_last_insn ();
2150 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2151 set_curr_insn_location (gimple_location (stmt));
2153 /* These flags have no purpose in RTL land. */
2154 true_edge->flags &= ~EDGE_TRUE_VALUE;
2155 false_edge->flags &= ~EDGE_FALSE_VALUE;
2157 /* We can either have a pure conditional jump with one fallthru edge or
2158 two-way jump that needs to be decomposed into two basic blocks. */
2159 if (false_edge->dest == bb->next_bb)
2161 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2162 true_edge->probability);
2163 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2164 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2165 set_curr_insn_location (true_edge->goto_locus);
2166 false_edge->flags |= EDGE_FALLTHRU;
2167 maybe_cleanup_end_of_block (false_edge, last);
2168 return NULL;
2170 if (true_edge->dest == bb->next_bb)
2172 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2173 false_edge->probability);
2174 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2175 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2176 set_curr_insn_location (false_edge->goto_locus);
2177 true_edge->flags |= EDGE_FALLTHRU;
2178 maybe_cleanup_end_of_block (true_edge, last);
2179 return NULL;
2182 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2183 true_edge->probability);
2184 last = get_last_insn ();
2185 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2186 set_curr_insn_location (false_edge->goto_locus);
2187 emit_jump (label_rtx_for_bb (false_edge->dest));
2189 BB_END (bb) = last;
2190 if (BARRIER_P (BB_END (bb)))
2191 BB_END (bb) = PREV_INSN (BB_END (bb));
2192 update_bb_for_insn (bb);
2194 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2195 dest = false_edge->dest;
2196 redirect_edge_succ (false_edge, new_bb);
2197 false_edge->flags |= EDGE_FALLTHRU;
2198 new_bb->count = false_edge->count;
2199 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2200 add_bb_to_loop (new_bb, bb->loop_father);
2201 new_edge = make_edge (new_bb, dest, 0);
2202 new_edge->probability = REG_BR_PROB_BASE;
2203 new_edge->count = new_bb->count;
2204 if (BARRIER_P (BB_END (new_bb)))
2205 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2206 update_bb_for_insn (new_bb);
2208 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2210 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2212 set_curr_insn_location (true_edge->goto_locus);
2213 true_edge->goto_locus = curr_insn_location ();
2216 return new_bb;
2219 /* Mark all calls that can have a transaction restart. */
2221 static void
2222 mark_transaction_restart_calls (gimple stmt)
2224 struct tm_restart_node dummy;
2225 void **slot;
2227 if (!cfun->gimple_df->tm_restart)
2228 return;
2230 dummy.stmt = stmt;
2231 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2232 if (slot)
2234 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2235 tree list = n->label_or_list;
2236 rtx_insn *insn;
2238 for (insn = next_real_insn (get_last_insn ());
2239 !CALL_P (insn);
2240 insn = next_real_insn (insn))
2241 continue;
2243 if (TREE_CODE (list) == LABEL_DECL)
2244 add_reg_note (insn, REG_TM, label_rtx (list));
2245 else
2246 for (; list ; list = TREE_CHAIN (list))
2247 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2251 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2252 statement STMT. */
2254 static void
2255 expand_call_stmt (gimple stmt)
2257 tree exp, decl, lhs;
2258 bool builtin_p;
2259 size_t i;
2261 if (gimple_call_internal_p (stmt))
2263 expand_internal_call (stmt);
2264 return;
2267 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2269 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2270 decl = gimple_call_fndecl (stmt);
2271 builtin_p = decl && DECL_BUILT_IN (decl);
2273 /* If this is not a builtin function, the function type through which the
2274 call is made may be different from the type of the function. */
2275 if (!builtin_p)
2276 CALL_EXPR_FN (exp)
2277 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2278 CALL_EXPR_FN (exp));
2280 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2281 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2283 for (i = 0; i < gimple_call_num_args (stmt); i++)
2285 tree arg = gimple_call_arg (stmt, i);
2286 gimple def;
2287 /* TER addresses into arguments of builtin functions so we have a
2288 chance to infer more correct alignment information. See PR39954. */
2289 if (builtin_p
2290 && TREE_CODE (arg) == SSA_NAME
2291 && (def = get_gimple_for_ssa_name (arg))
2292 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2293 arg = gimple_assign_rhs1 (def);
2294 CALL_EXPR_ARG (exp, i) = arg;
2297 if (gimple_has_side_effects (stmt))
2298 TREE_SIDE_EFFECTS (exp) = 1;
2300 if (gimple_call_nothrow_p (stmt))
2301 TREE_NOTHROW (exp) = 1;
2303 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2304 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2305 if (decl
2306 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2307 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2308 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2309 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2310 else
2311 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2312 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2313 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2315 /* Ensure RTL is created for debug args. */
2316 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2318 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2319 unsigned int ix;
2320 tree dtemp;
2322 if (debug_args)
2323 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2325 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2326 expand_debug_expr (dtemp);
2330 lhs = gimple_call_lhs (stmt);
2331 if (lhs)
2332 expand_assignment (lhs, exp, false);
2333 else
2334 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2336 mark_transaction_restart_calls (stmt);
2340 /* Generate RTL for an asm statement (explicit assembler code).
2341 STRING is a STRING_CST node containing the assembler code text,
2342 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2343 insn is volatile; don't optimize it. */
2345 static void
2346 expand_asm_loc (tree string, int vol, location_t locus)
2348 rtx body;
2350 if (TREE_CODE (string) == ADDR_EXPR)
2351 string = TREE_OPERAND (string, 0);
2353 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2354 ggc_strdup (TREE_STRING_POINTER (string)),
2355 locus);
2357 MEM_VOLATILE_P (body) = vol;
2359 emit_insn (body);
2362 /* Return the number of times character C occurs in string S. */
2363 static int
2364 n_occurrences (int c, const char *s)
2366 int n = 0;
2367 while (*s)
2368 n += (*s++ == c);
2369 return n;
2372 /* A subroutine of expand_asm_operands. Check that all operands have
2373 the same number of alternatives. Return true if so. */
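/* For example, asm ("foo %0,%1" : "=r,m" (x) : "r,m" (y)) has two
   alternatives in every constraint and is accepted, while pairing
   "=r,m" with a single-alternative "r" input is diagnosed.  */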
2375 static bool
2376 check_operand_nalternatives (tree outputs, tree inputs)
2378 if (outputs || inputs)
2380 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2381 int nalternatives
2382 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2383 tree next = inputs;
2385 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2387 error ("too many alternatives in %<asm%>");
2388 return false;
2391 tmp = outputs;
2392 while (tmp)
2394 const char *constraint
2395 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2397 if (n_occurrences (',', constraint) != nalternatives)
2399 error ("operand constraints for %<asm%> differ "
2400 "in number of alternatives");
2401 return false;
2404 if (TREE_CHAIN (tmp))
2405 tmp = TREE_CHAIN (tmp);
2406 else
2407 tmp = next, next = 0;
2411 return true;
2414 /* Check for overlap between registers marked in CLOBBERED_REGS and
2415 anything inappropriate in T.  Emit an error and return true if a
2416 conflict is found, false if everything is OK.  */
2418 static bool
2419 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2421 /* Conflicts between asm-declared register variables and the clobber
2422 list are not allowed. */
2423 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2425 if (overlap)
2427 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2428 DECL_NAME (overlap));
2430 /* Reset registerness to stop multiple errors from being emitted
2431 for a single variable.  */
2432 DECL_REGISTER (overlap) = 0;
2433 return true;
2436 return false;
2439 /* Generate RTL for an asm statement with arguments.
2440 STRING is the instruction template.
2441 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2442 Each output or input has an expression in the TREE_VALUE and
2443 a tree list in TREE_PURPOSE which in turn contains a constraint
2444 name in TREE_VALUE (or NULL_TREE) and a constraint string
2445 in TREE_PURPOSE.
2446 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2447 that is clobbered by this insn.
2449 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2450 should be the fallthru basic block of the asm goto.
2452 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2453 Some elements of OUTPUTS may be replaced with trees representing temporary
2454 values. The caller should copy those temporary values to the originally
2455 specified lvalues.
2457 VOL nonzero means the insn is volatile; don't optimize it. */
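/* Illustrative sketch, not from the original sources: for
   asm volatile ("add %0,%1" : "=r" (x) : "r" (y));
   STRING is "add %0,%1", OUTPUTS is a one-element list whose TREE_VALUE
   is X and whose TREE_PURPOSE is a tree list carrying a NULL_TREE name
   and the constraint string "=r", INPUTS likewise holds Y with
   constraint "r", and CLOBBERS and LABELS are empty.  */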
2459 static void
2460 expand_asm_operands (tree string, tree outputs, tree inputs,
2461 tree clobbers, tree labels, basic_block fallthru_bb,
2462 int vol, location_t locus)
2464 rtvec argvec, constraintvec, labelvec;
2465 rtx body;
2466 int ninputs = list_length (inputs);
2467 int noutputs = list_length (outputs);
2468 int nlabels = list_length (labels);
2469 int ninout;
2470 int nclobbers;
2471 HARD_REG_SET clobbered_regs;
2472 int clobber_conflict_found = 0;
2473 tree tail;
2474 tree t;
2475 int i;
2476 /* Vector of RTX's of evaluated output operands. */
2477 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2478 int *inout_opnum = XALLOCAVEC (int, noutputs);
2479 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2480 enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
2481 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2482 int old_generating_concat_p = generating_concat_p;
2483 rtx_code_label *fallthru_label = NULL;
2485 /* An ASM with no outputs needs to be treated as volatile, for now. */
2486 if (noutputs == 0)
2487 vol = 1;
2489 if (! check_operand_nalternatives (outputs, inputs))
2490 return;
2492 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2494 /* Collect constraints. */
2495 i = 0;
2496 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2497 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2498 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2499 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2501 /* Sometimes we wish to automatically clobber registers across an asm.
2502 Case in point is when the i386 backend moved from cc0 to a hard reg --
2503 maintaining source-level compatibility means automatically clobbering
2504 the flags register. */
2505 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2507 /* Count the number of meaningful clobbered registers, ignoring what
2508 we would ignore later. */
2509 nclobbers = 0;
2510 CLEAR_HARD_REG_SET (clobbered_regs);
2511 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2513 const char *regname;
2514 int nregs;
2516 if (TREE_VALUE (tail) == error_mark_node)
2517 return;
2518 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2520 i = decode_reg_name_and_count (regname, &nregs);
2521 if (i == -4)
2522 ++nclobbers;
2523 else if (i == -2)
2524 error ("unknown register name %qs in %<asm%>", regname);
2526 /* Mark clobbered registers. */
2527 if (i >= 0)
2529 int reg;
2531 for (reg = i; reg < i + nregs; reg++)
2533 ++nclobbers;
2535 /* Clobbering the PIC register is an error. */
2536 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2538 error ("PIC register clobbered by %qs in %<asm%>", regname);
2539 return;
2542 SET_HARD_REG_BIT (clobbered_regs, reg);
2547 /* First pass over inputs and outputs checks validity and sets
2548 mark_addressable if needed. */
2550 ninout = 0;
2551 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2553 tree val = TREE_VALUE (tail);
2554 tree type = TREE_TYPE (val);
2555 const char *constraint;
2556 bool is_inout;
2557 bool allows_reg;
2558 bool allows_mem;
2560 /* If there's an erroneous arg, emit no insn. */
2561 if (type == error_mark_node)
2562 return;
2564 /* Try to parse the output constraint. If that fails, there's
2565 no point in going further. */
2566 constraint = constraints[i];
2567 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2568 &allows_mem, &allows_reg, &is_inout))
2569 return;
2571 if (! allows_reg
2572 && (allows_mem
2573 || is_inout
2574 || (DECL_P (val)
2575 && REG_P (DECL_RTL (val))
2576 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2577 mark_addressable (val);
2579 if (is_inout)
2580 ninout++;
2583 ninputs += ninout;
2584 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
2586 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2587 return;
2590 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2592 bool allows_reg, allows_mem;
2593 const char *constraint;
2595 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2596 would get VOIDmode and that could cause a crash in reload. */
2597 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2598 return;
2600 constraint = constraints[i + noutputs];
2601 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2602 constraints, &allows_mem, &allows_reg))
2603 return;
2605 if (! allows_reg && allows_mem)
2606 mark_addressable (TREE_VALUE (tail));
2609 /* Second pass evaluates arguments. */
2611 /* Make sure stack is consistent for asm goto. */
2612 if (nlabels > 0)
2613 do_pending_stack_adjust ();
2615 ninout = 0;
2616 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2618 tree val = TREE_VALUE (tail);
2619 tree type = TREE_TYPE (val);
2620 bool is_inout;
2621 bool allows_reg;
2622 bool allows_mem;
2623 rtx op;
2624 bool ok;
2626 ok = parse_output_constraint (&constraints[i], i, ninputs,
2627 noutputs, &allows_mem, &allows_reg,
2628 &is_inout);
2629 gcc_assert (ok);
2631 /* If an output operand is not a decl or indirect ref and our constraint
2632 allows a register, make a temporary to act as an intermediate.
2633 Make the asm insn write into that, then our caller will copy it to
2634 the real output operand. Likewise for promoted variables. */
2636 generating_concat_p = 0;
2638 real_output_rtx[i] = NULL_RTX;
2639 if ((TREE_CODE (val) == INDIRECT_REF
2640 && allows_mem)
2641 || (DECL_P (val)
2642 && (allows_mem || REG_P (DECL_RTL (val)))
2643 && ! (REG_P (DECL_RTL (val))
2644 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2645 || ! allows_reg
2646 || is_inout)
2648 op = expand_expr (val, NULL_RTX, VOIDmode,
2649 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2650 if (MEM_P (op))
2651 op = validize_mem (op);
2653 if (! allows_reg && !MEM_P (op))
2654 error ("output number %d not directly addressable", i);
2655 if ((! allows_mem && MEM_P (op))
2656 || GET_CODE (op) == CONCAT)
2658 real_output_rtx[i] = op;
2659 op = gen_reg_rtx (GET_MODE (op));
2660 if (is_inout)
2661 emit_move_insn (op, real_output_rtx[i]);
2664 else
2666 op = assign_temp (type, 0, 1);
2667 op = validize_mem (op);
2668 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2669 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2670 TREE_VALUE (tail) = make_tree (type, op);
2672 output_rtx[i] = op;
2674 generating_concat_p = old_generating_concat_p;
2676 if (is_inout)
2678 inout_mode[ninout] = TYPE_MODE (type);
2679 inout_opnum[ninout++] = i;
2682 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2683 clobber_conflict_found = 1;
2686 /* Make vectors for the expression-rtx, constraint strings,
2687 and named operands. */
2689 argvec = rtvec_alloc (ninputs);
2690 constraintvec = rtvec_alloc (ninputs);
2691 labelvec = rtvec_alloc (nlabels);
2693 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2694 : GET_MODE (output_rtx[0])),
2695 ggc_strdup (TREE_STRING_POINTER (string)),
2696 empty_string, 0, argvec, constraintvec,
2697 labelvec, locus);
2699 MEM_VOLATILE_P (body) = vol;
2701 /* Eval the inputs and put them into ARGVEC.
2702 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2704 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2706 bool allows_reg, allows_mem;
2707 const char *constraint;
2708 tree val, type;
2709 rtx op;
2710 bool ok;
2712 constraint = constraints[i + noutputs];
2713 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2714 constraints, &allows_mem, &allows_reg);
2715 gcc_assert (ok);
2717 generating_concat_p = 0;
2719 val = TREE_VALUE (tail);
2720 type = TREE_TYPE (val);
2721 /* EXPAND_INITIALIZER will not generate code for valid initializer
2722 constants, but will still generate code for other types of operand.
2723 This is the behavior we want for constant constraints. */
2724 op = expand_expr (val, NULL_RTX, VOIDmode,
2725 allows_reg ? EXPAND_NORMAL
2726 : allows_mem ? EXPAND_MEMORY
2727 : EXPAND_INITIALIZER);
2729 /* Never pass a CONCAT to an ASM. */
2730 if (GET_CODE (op) == CONCAT)
2731 op = force_reg (GET_MODE (op), op);
2732 else if (MEM_P (op))
2733 op = validize_mem (op);
2735 if (asm_operand_ok (op, constraint, NULL) <= 0)
2737 if (allows_reg && TYPE_MODE (type) != BLKmode)
2738 op = force_reg (TYPE_MODE (type), op);
2739 else if (!allows_mem)
2740 warning (0, "asm operand %d probably doesn%'t match constraints",
2741 i + noutputs);
2742 else if (MEM_P (op))
2744 /* We won't recognize either volatile memory or memory
2745 with a queued address as an available memory_operand
2746 at this point.  Ignore it: clearly this *is* a memory operand.  */
2748 else
2749 gcc_unreachable ();
2752 generating_concat_p = old_generating_concat_p;
2753 ASM_OPERANDS_INPUT (body, i) = op;
2755 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2756 = gen_rtx_ASM_INPUT_loc (TYPE_MODE (type),
2757 ggc_strdup (constraints[i + noutputs]),
2758 locus);
2760 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2761 clobber_conflict_found = 1;
2764 /* Protect all the operands from the queue now that they have all been
2765 evaluated. */
2767 generating_concat_p = 0;
2769 /* For in-out operands, copy output rtx to input rtx. */
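/* For instance, a "+r" output at position 2 shows up again as an extra
   trailing input whose constraint is simply "2" -- the matching-operand
   form built by the sprintf below.  */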
2770 for (i = 0; i < ninout; i++)
2772 int j = inout_opnum[i];
2773 char buffer[16];
2775 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2776 = output_rtx[j];
2778 sprintf (buffer, "%d", j);
2779 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2780 = gen_rtx_ASM_INPUT_loc (inout_mode[i], ggc_strdup (buffer), locus);
2783 /* Copy labels to the vector. */
2784 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2786 rtx r;
2787 /* If asm goto has any labels in the fallthru basic block, use
2788 a label that we emit immediately after the asm goto. Expansion
2789 may insert further instructions into the same basic block after
2790 asm goto, and if we don't do this, insertion of instructions on
2791 the fallthru edge might misbehave. See PR58670. */
2792 if (fallthru_bb
2793 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2795 if (fallthru_label == NULL_RTX)
2796 fallthru_label = gen_label_rtx ();
2797 r = fallthru_label;
2799 else
2800 r = label_rtx (TREE_VALUE (tail));
2801 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2804 generating_concat_p = old_generating_concat_p;
2806 /* Now, for each output, construct an rtx
2807 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2808 ARGVEC CONSTRAINTS OPNAMES))
2809 If there is more than one, put them inside a PARALLEL. */
2811 if (nlabels > 0 && nclobbers == 0)
2813 gcc_assert (noutputs == 0);
2814 emit_jump_insn (body);
2816 else if (noutputs == 0 && nclobbers == 0)
2818 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2819 emit_insn (body);
2821 else if (noutputs == 1 && nclobbers == 0)
2823 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2824 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2826 else
2828 rtx obody = body;
2829 int num = noutputs;
2831 if (num == 0)
2832 num = 1;
2834 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2836 /* For each output operand, store a SET. */
2837 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2839 XVECEXP (body, 0, i)
2840 = gen_rtx_SET (VOIDmode,
2841 output_rtx[i],
2842 gen_rtx_ASM_OPERANDS
2843 (GET_MODE (output_rtx[i]),
2844 ggc_strdup (TREE_STRING_POINTER (string)),
2845 ggc_strdup (constraints[i]),
2846 i, argvec, constraintvec, labelvec, locus));
2848 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2851 /* If there are no outputs (but there are some clobbers)
2852 store the bare ASM_OPERANDS into the PARALLEL. */
2854 if (i == 0)
2855 XVECEXP (body, 0, i++) = obody;
2857 /* Store (clobber REG) for each clobbered register specified. */
2859 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2861 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2862 int reg, nregs;
2863 int j = decode_reg_name_and_count (regname, &nregs);
2864 rtx clobbered_reg;
2866 if (j < 0)
2868 if (j == -3) /* `cc', which is not a register */
2869 continue;
2871 if (j == -4) /* `memory', don't cache memory across asm */
2873 XVECEXP (body, 0, i++)
2874 = gen_rtx_CLOBBER (VOIDmode,
2875 gen_rtx_MEM
2876 (BLKmode,
2877 gen_rtx_SCRATCH (VOIDmode)));
2878 continue;
2881 /* Ignore unknown register, error already signaled. */
2882 continue;
2885 for (reg = j; reg < j + nregs; reg++)
2887 /* Use QImode since that's guaranteed to clobber just
2888 * one reg. */
2889 clobbered_reg = gen_rtx_REG (QImode, reg);
2891 /* Do sanity check for overlap between clobbers and
2892 respectively input and outputs that hasn't been
2893 handled. Such overlap should have been detected and
2894 reported above. */
2895 if (!clobber_conflict_found)
2897 int opno;
2899 /* We test the old body (obody) contents to avoid
2900 tripping over the under-construction body. */
2901 for (opno = 0; opno < noutputs; opno++)
2902 if (reg_overlap_mentioned_p (clobbered_reg,
2903 output_rtx[opno]))
2904 internal_error
2905 ("asm clobber conflict with output operand");
2907 for (opno = 0; opno < ninputs - ninout; opno++)
2908 if (reg_overlap_mentioned_p (clobbered_reg,
2909 ASM_OPERANDS_INPUT (obody,
2910 opno)))
2911 internal_error
2912 ("asm clobber conflict with input operand");
2915 XVECEXP (body, 0, i++)
2916 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2920 if (nlabels > 0)
2921 emit_jump_insn (body);
2922 else
2923 emit_insn (body);
2926 if (fallthru_label)
2927 emit_label (fallthru_label);
2929 /* For any outputs that needed reloading into registers, spill them
2930 back to where they belong. */
2931 for (i = 0; i < noutputs; ++i)
2932 if (real_output_rtx[i])
2933 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2935 crtl->has_asm_statement = 1;
2936 free_temp_slots ();
2940 static void
2941 expand_asm_stmt (gimple stmt)
2943 int noutputs;
2944 tree outputs, tail, t;
2945 tree *o;
2946 size_t i, n;
2947 const char *s;
2948 tree str, out, in, cl, labels;
2949 location_t locus = gimple_location (stmt);
2950 basic_block fallthru_bb = NULL;
2952 /* Meh... convert the gimple asm operands into real tree lists.
2953 Eventually we should make all routines work on the vectors instead
2954 of relying on TREE_CHAIN. */
2955 out = NULL_TREE;
2956 n = gimple_asm_noutputs (stmt);
2957 if (n > 0)
2959 t = out = gimple_asm_output_op (stmt, 0);
2960 for (i = 1; i < n; i++)
2961 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
2964 in = NULL_TREE;
2965 n = gimple_asm_ninputs (stmt);
2966 if (n > 0)
2968 t = in = gimple_asm_input_op (stmt, 0);
2969 for (i = 1; i < n; i++)
2970 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
2973 cl = NULL_TREE;
2974 n = gimple_asm_nclobbers (stmt);
2975 if (n > 0)
2977 t = cl = gimple_asm_clobber_op (stmt, 0);
2978 for (i = 1; i < n; i++)
2979 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
2982 labels = NULL_TREE;
2983 n = gimple_asm_nlabels (stmt);
2984 if (n > 0)
2986 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
2987 if (fallthru)
2988 fallthru_bb = fallthru->dest;
2989 t = labels = gimple_asm_label_op (stmt, 0);
2990 for (i = 1; i < n; i++)
2991 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
2994 s = gimple_asm_string (stmt);
2995 str = build_string (strlen (s), s);
2997 if (gimple_asm_input_p (stmt))
2999 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
3000 return;
3003 outputs = out;
3004 noutputs = gimple_asm_noutputs (stmt);
3005 /* o[I] is the place that output number I should be written. */
3006 o = (tree *) alloca (noutputs * sizeof (tree));
3008 /* Record the contents of OUTPUTS before it is modified. */
3009 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3010 o[i] = TREE_VALUE (tail);
3012 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
3013 OUTPUTS some trees for where the values were actually stored. */
3014 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
3015 gimple_asm_volatile_p (stmt), locus);
3017 /* Copy all the intermediate outputs into the specified outputs. */
3018 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3020 if (o[i] != TREE_VALUE (tail))
3022 expand_assignment (o[i], TREE_VALUE (tail), false);
3023 free_temp_slots ();
3025 /* Restore the original value so that it's correct the next
3026 time we expand this function. */
3027 TREE_VALUE (tail) = o[i];
3032 /* Emit code to jump to the address
3033 specified by the pointer expression EXP. */
3035 static void
3036 expand_computed_goto (tree exp)
3038 rtx x = expand_normal (exp);
3040 x = convert_memory_address (Pmode, x);
3042 do_pending_stack_adjust ();
3043 emit_indirect_jump (x);
3046 /* Generate RTL code for a `goto' statement with target label LABEL.
3047 LABEL should be a LABEL_DECL tree node that was or will later be
3048 defined with `expand_label'. */
3050 static void
3051 expand_goto (tree label)
3053 #ifdef ENABLE_CHECKING
3054 /* Check for a nonlocal goto to a containing function. Should have
3055 gotten translated to __builtin_nonlocal_goto. */
3056 tree context = decl_function_context (label);
3057 gcc_assert (!context || context == current_function_decl);
3058 #endif
3060 emit_jump (label_rtx (label));
3063 /* Output a return with no value. */
3065 static void
3066 expand_null_return_1 (void)
3068 clear_pending_stack_adjust ();
3069 do_pending_stack_adjust ();
3070 emit_jump (return_label);
3073 /* Generate RTL to return from the current function, with no value.
3074 (That is, we do not do anything about returning any value.) */
3076 void
3077 expand_null_return (void)
3079 /* If this function was declared to return a value, but we
3080 didn't, clobber the return registers so that they are not
3081 propagated live to the rest of the function. */
3082 clobber_return_register ();
3084 expand_null_return_1 ();
3087 /* Generate RTL to return from the current function, with value VAL. */
3089 static void
3090 expand_value_return (rtx val)
3092 /* Copy the value to the return location unless it's already there. */
3094 tree decl = DECL_RESULT (current_function_decl);
3095 rtx return_reg = DECL_RTL (decl);
3096 if (return_reg != val)
3098 tree funtype = TREE_TYPE (current_function_decl);
3099 tree type = TREE_TYPE (decl);
3100 int unsignedp = TYPE_UNSIGNED (type);
3101 enum machine_mode old_mode = DECL_MODE (decl);
3102 enum machine_mode mode;
3103 if (DECL_BY_REFERENCE (decl))
3104 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3105 else
3106 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3108 if (mode != old_mode)
3109 val = convert_modes (mode, old_mode, val, unsignedp);
3111 if (GET_CODE (return_reg) == PARALLEL)
3112 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3113 else
3114 emit_move_insn (return_reg, val);
3117 expand_null_return_1 ();
3120 /* Generate RTL to evaluate the expression RETVAL and return it
3121 from the current function. */
3123 static void
3124 expand_return (tree retval)
3126 rtx result_rtl;
3127 rtx val = 0;
3128 tree retval_rhs;
3130 /* If function wants no value, give it none. */
3131 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3133 expand_normal (retval);
3134 expand_null_return ();
3135 return;
3138 if (retval == error_mark_node)
3140 /* Treat this like a return of no value from a function that
3141 returns a value. */
3142 expand_null_return ();
3143 return;
3145 else if ((TREE_CODE (retval) == MODIFY_EXPR
3146 || TREE_CODE (retval) == INIT_EXPR)
3147 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3148 retval_rhs = TREE_OPERAND (retval, 1);
3149 else
3150 retval_rhs = retval;
3152 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3154 /* If we are returning the RESULT_DECL, then the value has already
3155 been stored into it, so we don't have to do anything special. */
3156 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3157 expand_value_return (result_rtl);
3159 /* If the result is an aggregate that is being returned in one (or more)
3160 registers, load the registers here. */
3162 else if (retval_rhs != 0
3163 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3164 && REG_P (result_rtl))
3166 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3167 if (val)
3169 /* Use the mode of the result value on the return register. */
3170 PUT_MODE (result_rtl, GET_MODE (val));
3171 expand_value_return (val);
3173 else
3174 expand_null_return ();
3176 else if (retval_rhs != 0
3177 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3178 && (REG_P (result_rtl)
3179 || (GET_CODE (result_rtl) == PARALLEL)))
3181 /* Compute the return value into a temporary (usually a pseudo reg). */
3183 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3184 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3185 val = force_not_mem (val);
3186 expand_value_return (val);
3188 else
3190 /* No hard reg used; calculate value into hard return reg. */
3191 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3192 expand_value_return (result_rtl);
3196 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3197 STMT that doesn't require special handling for outgoing edges. That
3198 is, no tail calls and no GIMPLE_COND.  */
3200 static void
3201 expand_gimple_stmt_1 (gimple stmt)
3203 tree op0;
3205 set_curr_insn_location (gimple_location (stmt));
3207 switch (gimple_code (stmt))
3209 case GIMPLE_GOTO:
3210 op0 = gimple_goto_dest (stmt);
3211 if (TREE_CODE (op0) == LABEL_DECL)
3212 expand_goto (op0);
3213 else
3214 expand_computed_goto (op0);
3215 break;
3216 case GIMPLE_LABEL:
3217 expand_label (gimple_label_label (stmt));
3218 break;
3219 case GIMPLE_NOP:
3220 case GIMPLE_PREDICT:
3221 break;
3222 case GIMPLE_SWITCH:
3223 expand_case (stmt);
3224 break;
3225 case GIMPLE_ASM:
3226 expand_asm_stmt (stmt);
3227 break;
3228 case GIMPLE_CALL:
3229 expand_call_stmt (stmt);
3230 break;
3232 case GIMPLE_RETURN:
3233 op0 = gimple_return_retval (stmt);
3235 if (op0 && op0 != error_mark_node)
3237 tree result = DECL_RESULT (current_function_decl);
3239 /* If we are not returning the current function's RESULT_DECL,
3240 build an assignment to it. */
3241 if (op0 != result)
3243 /* I believe that a function's RESULT_DECL is unique. */
3244 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3246 /* ??? We'd like to use simply expand_assignment here,
3247 but this fails if the value is of BLKmode but the return
3248 decl is a register. expand_return has special handling
3249 for this combination, which eventually should move
3250 to common code. See comments there. Until then, let's
3251 build a modify expression :-/ */
3252 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3253 result, op0);
3256 if (!op0)
3257 expand_null_return ();
3258 else
3259 expand_return (op0);
3260 break;
3262 case GIMPLE_ASSIGN:
3264 tree lhs = gimple_assign_lhs (stmt);
3266 /* Tree expand used to fiddle with |= and &= of two bitfield
3267 COMPONENT_REFs here.  This can't happen with gimple; the LHS
3268 of binary assigns must be a gimple reg. */
3270 if (TREE_CODE (lhs) != SSA_NAME
3271 || get_gimple_rhs_class (gimple_expr_code (stmt))
3272 == GIMPLE_SINGLE_RHS)
3274 tree rhs = gimple_assign_rhs1 (stmt);
3275 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3276 == GIMPLE_SINGLE_RHS);
3277 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3278 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3279 if (TREE_CLOBBER_P (rhs))
3280 /* This is a clobber marking that this LHS is going out of
3281 scope.  */
3283 else
3284 expand_assignment (lhs, rhs,
3285 gimple_assign_nontemporal_move_p (stmt));
3287 else
3289 rtx target, temp;
3290 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
3291 struct separate_ops ops;
3292 bool promoted = false;
3294 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3295 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3296 promoted = true;
3298 ops.code = gimple_assign_rhs_code (stmt);
3299 ops.type = TREE_TYPE (lhs);
3300 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3302 case GIMPLE_TERNARY_RHS:
3303 ops.op2 = gimple_assign_rhs3 (stmt);
3304 /* Fallthru */
3305 case GIMPLE_BINARY_RHS:
3306 ops.op1 = gimple_assign_rhs2 (stmt);
3307 /* Fallthru */
3308 case GIMPLE_UNARY_RHS:
3309 ops.op0 = gimple_assign_rhs1 (stmt);
3310 break;
3311 default:
3312 gcc_unreachable ();
3314 ops.location = gimple_location (stmt);
3316 /* If we want to use a nontemporal store, force the value to
3317 register first. If we store into a promoted register,
3318 don't directly expand to target. */
3319 temp = nontemporal || promoted ? NULL_RTX : target;
3320 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3321 EXPAND_NORMAL);
3323 if (temp == target)
3325 else if (promoted)
3327 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3328 /* If TEMP is a VOIDmode constant, use convert_modes to make
3329 sure that we properly convert it. */
3330 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3332 temp = convert_modes (GET_MODE (target),
3333 TYPE_MODE (ops.type),
3334 temp, unsignedp);
3335 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3336 GET_MODE (target), temp, unsignedp);
3339 convert_move (SUBREG_REG (target), temp, unsignedp);
3341 else if (nontemporal && emit_storent_insn (target, temp))
3343 else
3345 temp = force_operand (temp, target);
3346 if (temp != target)
3347 emit_move_insn (target, temp);
3351 break;
3353 default:
3354 gcc_unreachable ();
3358 /* Expand one gimple statement STMT and return the last RTL instruction
3359 before any of the newly generated ones.
3361 In addition to generating the necessary RTL instructions this also
3362 sets REG_EH_REGION notes if necessary and sets the current source
3363 location for diagnostics. */
3365 static rtx_insn *
3366 expand_gimple_stmt (gimple stmt)
3368 location_t saved_location = input_location;
3369 rtx_insn *last = get_last_insn ();
3370 int lp_nr;
3372 gcc_assert (cfun);
3374 /* We need to save and restore the current source location so that errors
3375 discovered during expansion are emitted with the right location. But
3376 it would be better if the diagnostic routines used the source location
3377 embedded in the tree nodes rather than globals. */
3378 if (gimple_has_location (stmt))
3379 input_location = gimple_location (stmt);
3381 expand_gimple_stmt_1 (stmt);
3383 /* Free any temporaries used to evaluate this statement. */
3384 free_temp_slots ();
3386 input_location = saved_location;
3388 /* Mark all insns that may trap. */
3389 lp_nr = lookup_stmt_eh_lp (stmt);
3390 if (lp_nr)
3392 rtx_insn *insn;
3393 for (insn = next_real_insn (last); insn;
3394 insn = next_real_insn (insn))
3396 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3397 /* If we want exceptions for non-call insns, any
3398 may_trap_p instruction may throw. */
3399 && GET_CODE (PATTERN (insn)) != CLOBBER
3400 && GET_CODE (PATTERN (insn)) != USE
3401 && insn_could_throw_p (insn))
3402 make_reg_eh_region_note (insn, 0, lp_nr);
3406 return last;
3409 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3410 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3411 generated a tail call (something that might be denied by the ABI
3412 rules governing the call; see calls.c).
3414 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3415 can still reach the rest of BB. The case here is __builtin_sqrt,
3416 where the NaN result goes through the external function (with a
3417 tailcall) and the normal result happens via a sqrt instruction. */
3419 static basic_block
3420 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
3422 rtx_insn *last2, *last;
3423 edge e;
3424 edge_iterator ei;
3425 int probability;
3426 gcov_type count;
3428 last2 = last = expand_gimple_stmt (stmt);
3430 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3431 if (CALL_P (last) && SIBLING_CALL_P (last))
3432 goto found;
3434 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3436 *can_fallthru = true;
3437 return NULL;
3439 found:
3440 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3441 Any instructions emitted here are about to be deleted. */
3442 do_pending_stack_adjust ();
3444 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3445 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3446 EH or abnormal edges, we shouldn't have created a tail call in
3447 the first place. So it seems to me we should just be removing
3448 all edges here, or redirecting the existing fallthru edge to
3449 the exit block. */
3451 probability = 0;
3452 count = 0;
3454 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3456 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3458 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3460 e->dest->count -= e->count;
3461 e->dest->frequency -= EDGE_FREQUENCY (e);
3462 if (e->dest->count < 0)
3463 e->dest->count = 0;
3464 if (e->dest->frequency < 0)
3465 e->dest->frequency = 0;
3467 count += e->count;
3468 probability += e->probability;
3469 remove_edge (e);
3471 else
3472 ei_next (&ei);
3475 /* This is somewhat ugly: the call_expr expander often emits instructions
3476 after the sibcall (to perform the function return). These confuse the
3477 find_many_sub_basic_blocks code, so we need to get rid of these. */
3478 last = NEXT_INSN (last);
3479 gcc_assert (BARRIER_P (last));
3481 *can_fallthru = false;
3482 while (NEXT_INSN (last))
3484 /* For instance, the sqrt builtin expander may expand an if with a
3485 sibcall in the `then` arm and a label for the `else` arm.  */
3486 if (LABEL_P (NEXT_INSN (last)))
3488 *can_fallthru = true;
3489 break;
3491 delete_insn (NEXT_INSN (last));
3494 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3495 | EDGE_SIBCALL);
3496 e->probability += probability;
3497 e->count += count;
3498 BB_END (bb) = last;
3499 update_bb_for_insn (bb);
3501 if (NEXT_INSN (last))
3503 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3505 last = BB_END (bb);
3506 if (BARRIER_P (last))
3507 BB_END (bb) = PREV_INSN (last);
3510 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3512 return bb;
3515 /* Return the difference between the floor and the truncated result of
3516 a signed division by OP1 with remainder MOD. */
3517 static rtx
3518 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3520 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
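/* E.g. for -7 / 2 truncation yields -3 with MOD == -1; OP1 / MOD is
   negative, so the adjustment is -1 and the floor result is -4.  */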
3521 return gen_rtx_IF_THEN_ELSE
3522 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3523 gen_rtx_IF_THEN_ELSE
3524 (mode, gen_rtx_LT (BImode,
3525 gen_rtx_DIV (mode, op1, mod),
3526 const0_rtx),
3527 constm1_rtx, const0_rtx),
3528 const0_rtx);
3531 /* Return the difference between the ceil and the truncated result of
3532 a signed division by OP1 with remainder MOD. */
3533 static rtx
3534 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3536 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
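/* E.g. for 7 / 2 truncation yields 3 with MOD == 1; OP1 / MOD is
   positive, so the adjustment is 1 and the ceiling result is 4.  */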
3537 return gen_rtx_IF_THEN_ELSE
3538 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3539 gen_rtx_IF_THEN_ELSE
3540 (mode, gen_rtx_GT (BImode,
3541 gen_rtx_DIV (mode, op1, mod),
3542 const0_rtx),
3543 const1_rtx, const0_rtx),
3544 const0_rtx);
3547 /* Return the difference between the ceil and the truncated result of
3548 an unsigned division by OP1 with remainder MOD. */
3549 static rtx
3550 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3552 /* (mod != 0 ? 1 : 0) */
3553 return gen_rtx_IF_THEN_ELSE
3554 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3555 const1_rtx, const0_rtx);
3558 /* Return the difference between the rounded and the truncated result
3559 of a signed division by OP1 with remainder MOD. Halfway cases are
3560 rounded away from zero, rather than to the nearest even number. */
3561 static rtx
3562 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3564 /* (abs (mod) >= abs (op1) - abs (mod)
3565 ? (op1 / mod > 0 ? 1 : -1)
3566 : 0) */
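/* E.g. for 7 / 2 truncation yields 3 with MOD == 1; abs (MOD) is not
   less than abs (OP1) - abs (MOD) (1 >= 1) and OP1 / MOD is positive,
   so the adjustment is 1 and the rounded result is 4.  */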
3567 return gen_rtx_IF_THEN_ELSE
3568 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3569 gen_rtx_MINUS (mode,
3570 gen_rtx_ABS (mode, op1),
3571 gen_rtx_ABS (mode, mod))),
3572 gen_rtx_IF_THEN_ELSE
3573 (mode, gen_rtx_GT (BImode,
3574 gen_rtx_DIV (mode, op1, mod),
3575 const0_rtx),
3576 const1_rtx, constm1_rtx),
3577 const0_rtx);
3580 /* Return the difference between the rounded and the truncated result
3581 of an unsigned division by OP1 with remainder MOD.  Halfway cases
3582 are rounded away from zero, rather than to the nearest even
3583 number. */
3584 static rtx
3585 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
3587 /* (mod >= op1 - mod ? 1 : 0) */
3588 return gen_rtx_IF_THEN_ELSE
3589 (mode, gen_rtx_GE (BImode, mod,
3590 gen_rtx_MINUS (mode, op1, mod)),
3591 const1_rtx, const0_rtx);
3594 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
3595 any rtl. */
3597 static rtx
3598 convert_debug_memory_address (enum machine_mode mode, rtx x,
3599 addr_space_t as)
3601 enum machine_mode xmode = GET_MODE (x);
3603 #ifndef POINTERS_EXTEND_UNSIGNED
3604 gcc_assert (mode == Pmode
3605 || mode == targetm.addr_space.address_mode (as));
3606 gcc_assert (xmode == mode || xmode == VOIDmode);
3607 #else
3608 rtx temp;
3610 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3612 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3613 return x;
3615 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3616 x = simplify_gen_subreg (mode, x, xmode,
3617 subreg_lowpart_offset
3618 (mode, xmode));
3619 else if (POINTERS_EXTEND_UNSIGNED > 0)
3620 x = gen_rtx_ZERO_EXTEND (mode, x);
3621 else if (!POINTERS_EXTEND_UNSIGNED)
3622 x = gen_rtx_SIGN_EXTEND (mode, x);
3623 else
3625 switch (GET_CODE (x))
3627 case SUBREG:
3628 if ((SUBREG_PROMOTED_VAR_P (x)
3629 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3630 || (GET_CODE (SUBREG_REG (x)) == PLUS
3631 && REG_P (XEXP (SUBREG_REG (x), 0))
3632 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3633 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3634 && GET_MODE (SUBREG_REG (x)) == mode)
3635 return SUBREG_REG (x);
3636 break;
3637 case LABEL_REF:
3638 temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
3639 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3640 return temp;
3641 case SYMBOL_REF:
3642 temp = shallow_copy_rtx (x);
3643 PUT_MODE (temp, mode);
3644 return temp;
3645 case CONST:
3646 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3647 if (temp)
3648 temp = gen_rtx_CONST (mode, temp);
3649 return temp;
3650 case PLUS:
3651 case MINUS:
3652 if (CONST_INT_P (XEXP (x, 1)))
3654 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3655 if (temp)
3656 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3658 break;
3659 default:
3660 break;
3662 /* Don't know how to express ptr_extend as operation in debug info. */
3663 return NULL;
3665 #endif /* POINTERS_EXTEND_UNSIGNED */
3667 return x;
3670 /* Return an RTX equivalent to the value of the parameter DECL. */
3672 static rtx
3673 expand_debug_parm_decl (tree decl)
3675 rtx incoming = DECL_INCOMING_RTL (decl);
3677 if (incoming
3678 && GET_MODE (incoming) != BLKmode
3679 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3680 || (MEM_P (incoming)
3681 && REG_P (XEXP (incoming, 0))
3682 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3684 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3686 #ifdef HAVE_window_save
3687 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3688 If the target machine has an explicit window save instruction, the
3689 actual entry value is the corresponding OUTGOING_REGNO instead. */
3690 if (REG_P (incoming)
3691 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3692 incoming
3693 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3694 OUTGOING_REGNO (REGNO (incoming)), 0);
3695 else if (MEM_P (incoming))
3697 rtx reg = XEXP (incoming, 0);
3698 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3700 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3701 incoming = replace_equiv_address_nv (incoming, reg);
3703 else
3704 incoming = copy_rtx (incoming);
3706 #endif
3708 ENTRY_VALUE_EXP (rtl) = incoming;
3709 return rtl;
3712 if (incoming
3713 && GET_MODE (incoming) != BLKmode
3714 && !TREE_ADDRESSABLE (decl)
3715 && MEM_P (incoming)
3716 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3717 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3718 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3719 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3720 return copy_rtx (incoming);
3722 return NULL_RTX;
3725 /* Return an RTX equivalent to the value of the tree expression EXP. */
3727 static rtx
3728 expand_debug_expr (tree exp)
3730 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3731 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3732 enum machine_mode inner_mode = VOIDmode;
3733 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3734 addr_space_t as;
3736 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3738 case tcc_expression:
3739 switch (TREE_CODE (exp))
3741 case COND_EXPR:
3742 case DOT_PROD_EXPR:
3743 case SAD_EXPR:
3744 case WIDEN_MULT_PLUS_EXPR:
3745 case WIDEN_MULT_MINUS_EXPR:
3746 case FMA_EXPR:
3747 goto ternary;
3749 case TRUTH_ANDIF_EXPR:
3750 case TRUTH_ORIF_EXPR:
3751 case TRUTH_AND_EXPR:
3752 case TRUTH_OR_EXPR:
3753 case TRUTH_XOR_EXPR:
3754 goto binary;
3756 case TRUTH_NOT_EXPR:
3757 goto unary;
3759 default:
3760 break;
3762 break;
3764 ternary:
3765 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3766 if (!op2)
3767 return NULL_RTX;
3768 /* Fall through. */
3770 binary:
3771 case tcc_binary:
3772 case tcc_comparison:
3773 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3774 if (!op1)
3775 return NULL_RTX;
3776 /* Fall through. */
3778 unary:
3779 case tcc_unary:
3780 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3781 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3782 if (!op0)
3783 return NULL_RTX;
3784 break;
3786 case tcc_type:
3787 case tcc_statement:
3788 gcc_unreachable ();
3790 case tcc_constant:
3791 case tcc_exceptional:
3792 case tcc_declaration:
3793 case tcc_reference:
3794 case tcc_vl_exp:
3795 break;
3798 switch (TREE_CODE (exp))
3800 case STRING_CST:
3801 if (!lookup_constant_def (exp))
3803 if (strlen (TREE_STRING_POINTER (exp)) + 1
3804 != (size_t) TREE_STRING_LENGTH (exp))
3805 return NULL_RTX;
3806 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3807 op0 = gen_rtx_MEM (BLKmode, op0);
3808 set_mem_attributes (op0, exp, 0);
3809 return op0;
3811 /* Fall through... */
3813 case INTEGER_CST:
3814 case REAL_CST:
3815 case FIXED_CST:
3816 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3817 return op0;
3819 case COMPLEX_CST:
3820 gcc_assert (COMPLEX_MODE_P (mode));
3821 op0 = expand_debug_expr (TREE_REALPART (exp));
3822 op1 = expand_debug_expr (TREE_IMAGPART (exp));
3823 return gen_rtx_CONCAT (mode, op0, op1);
3825 case DEBUG_EXPR_DECL:
3826 op0 = DECL_RTL_IF_SET (exp);
3828 if (op0)
3829 return op0;
3831 op0 = gen_rtx_DEBUG_EXPR (mode);
3832 DEBUG_EXPR_TREE_DECL (op0) = exp;
3833 SET_DECL_RTL (exp, op0);
3835 return op0;
3837 case VAR_DECL:
3838 case PARM_DECL:
3839 case FUNCTION_DECL:
3840 case LABEL_DECL:
3841 case CONST_DECL:
3842 case RESULT_DECL:
3843 op0 = DECL_RTL_IF_SET (exp);
3845 /* This decl was probably optimized away. */
3846 if (!op0)
3848 if (TREE_CODE (exp) != VAR_DECL
3849 || DECL_EXTERNAL (exp)
3850 || !TREE_STATIC (exp)
3851 || !DECL_NAME (exp)
3852 || DECL_HARD_REGISTER (exp)
3853 || DECL_IN_CONSTANT_POOL (exp)
3854 || mode == VOIDmode)
3855 return NULL;
3857 op0 = make_decl_rtl_for_debug (exp);
3858 if (!MEM_P (op0)
3859 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
3860 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
3861 return NULL;
3863 else
3864 op0 = copy_rtx (op0);
3866 if (GET_MODE (op0) == BLKmode
3868 /* If op0 is not BLKmode, but MODE is BLKmode, the adjust_mode
3869 code below would ICE.  While it is likely a front-end bug,
3870 try to be robust here.  See PR43166. */
3870 || mode == BLKmode
3871 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
3873 gcc_assert (MEM_P (op0));
3874 op0 = adjust_address_nv (op0, mode, 0);
3875 return op0;
3878 /* Fall through. */
3880 adjust_mode:
3881 case PAREN_EXPR:
3882 case NOP_EXPR:
3883 case CONVERT_EXPR:
3885 inner_mode = GET_MODE (op0);
3887 if (mode == inner_mode)
3888 return op0;
3890 if (inner_mode == VOIDmode)
3892 if (TREE_CODE (exp) == SSA_NAME)
3893 inner_mode = TYPE_MODE (TREE_TYPE (exp));
3894 else
3895 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3896 if (mode == inner_mode)
3897 return op0;
3900 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3902 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3903 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3904 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3905 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3906 else
3907 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3909 else if (FLOAT_MODE_P (mode))
3911 gcc_assert (TREE_CODE (exp) != SSA_NAME);
3912 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3913 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
3914 else
3915 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
3917 else if (FLOAT_MODE_P (inner_mode))
3919 if (unsignedp)
3920 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3921 else
3922 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3924 else if (CONSTANT_P (op0)
3925 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
3926 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3927 subreg_lowpart_offset (mode,
3928 inner_mode));
3929 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
3930 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
3931 : unsignedp)
3932 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3933 else
3934 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3936 return op0;
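/* Illustrative sketch (not from the original comments): widening a
   debug value from SImode to DImode with an unsigned source type
   produces roughly (zero_extend:DI (reg:SI N)), while narrowing
   DImode to SImode takes the lowpart subreg computed above, roughly
   (subreg:SI (reg:DI N) 0) on a little-endian target.  */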
3939 case MEM_REF:
3940 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3942 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
3943 TREE_OPERAND (exp, 0),
3944 TREE_OPERAND (exp, 1));
3945 if (newexp)
3946 return expand_debug_expr (newexp);
3948 /* FALLTHROUGH */
3949 case INDIRECT_REF:
3950 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3951 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3952 if (!op0)
3953 return NULL;
3955 if (TREE_CODE (exp) == MEM_REF)
3957 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3958 || (GET_CODE (op0) == PLUS
3959 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
3960 /* (mem (debug_implicit_ptr)) might confuse aliasing.
3961 Instead just use get_inner_reference. */
3962 goto component_ref;
3964 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3965 if (!op1 || !CONST_INT_P (op1))
3966 return NULL;
3968 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
3971 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
3973 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3974 op0, as);
3975 if (op0 == NULL_RTX)
3976 return NULL;
3978 op0 = gen_rtx_MEM (mode, op0);
3979 set_mem_attributes (op0, exp, 0);
3980 if (TREE_CODE (exp) == MEM_REF
3981 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3982 set_mem_expr (op0, NULL_TREE);
3983 set_mem_addr_space (op0, as);
3985 return op0;
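/* Rough example (illustrative only): on a typical 64-bit target a
   gimple reference such as MEM[(int *)p_1 + 4B] would expand to
   something like (mem:SI (plus:DI (reg:DI p) (const_int 4))), with
   the address first converted to the address mode of its address
   space by convert_debug_memory_address above.  */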
3987 case TARGET_MEM_REF:
3988 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
3989 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
3990 return NULL;
3992 op0 = expand_debug_expr
3993 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
3994 if (!op0)
3995 return NULL;
3997 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
3998 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3999 op0, as);
4000 if (op0 == NULL_RTX)
4001 return NULL;
4003 op0 = gen_rtx_MEM (mode, op0);
4005 set_mem_attributes (op0, exp, 0);
4006 set_mem_addr_space (op0, as);
4008 return op0;
4010 component_ref:
4011 case ARRAY_REF:
4012 case ARRAY_RANGE_REF:
4013 case COMPONENT_REF:
4014 case BIT_FIELD_REF:
4015 case REALPART_EXPR:
4016 case IMAGPART_EXPR:
4017 case VIEW_CONVERT_EXPR:
4019 enum machine_mode mode1;
4020 HOST_WIDE_INT bitsize, bitpos;
4021 tree offset;
4022 int volatilep = 0;
4023 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4024 &mode1, &unsignedp, &volatilep, false);
4025 rtx orig_op0;
4027 if (bitsize == 0)
4028 return NULL;
4030 orig_op0 = op0 = expand_debug_expr (tem);
4032 if (!op0)
4033 return NULL;
4035 if (offset)
4037 enum machine_mode addrmode, offmode;
4039 if (!MEM_P (op0))
4040 return NULL;
4042 op0 = XEXP (op0, 0);
4043 addrmode = GET_MODE (op0);
4044 if (addrmode == VOIDmode)
4045 addrmode = Pmode;
4047 op1 = expand_debug_expr (offset);
4048 if (!op1)
4049 return NULL;
4051 offmode = GET_MODE (op1);
4052 if (offmode == VOIDmode)
4053 offmode = TYPE_MODE (TREE_TYPE (offset));
4055 if (addrmode != offmode)
4056 op1 = simplify_gen_subreg (addrmode, op1, offmode,
4057 subreg_lowpart_offset (addrmode,
4058 offmode));
4060 /* Don't use offset_address here, we don't need a
4061 recognizable address, and we don't want to generate
4062 code. */
4063 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4064 op0, op1));
4067 if (MEM_P (op0))
4069 if (mode1 == VOIDmode)
4070 /* Bitfield. */
4071 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4072 if (bitpos >= BITS_PER_UNIT)
4074 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4075 bitpos %= BITS_PER_UNIT;
4077 else if (bitpos < 0)
4079 HOST_WIDE_INT units
4080 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4081 op0 = adjust_address_nv (op0, mode1, units);
4082 bitpos += units * BITS_PER_UNIT;
4084 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4085 op0 = adjust_address_nv (op0, mode, 0);
4086 else if (GET_MODE (op0) != mode1)
4087 op0 = adjust_address_nv (op0, mode1, 0);
4088 else
4089 op0 = copy_rtx (op0);
4090 if (op0 == orig_op0)
4091 op0 = shallow_copy_rtx (op0);
4092 set_mem_attributes (op0, exp, 0);
4095 if (bitpos == 0 && mode == GET_MODE (op0))
4096 return op0;
4098 if (bitpos < 0)
4099 return NULL;
4101 if (GET_MODE (op0) == BLKmode)
4102 return NULL;
4104 if ((bitpos % BITS_PER_UNIT) == 0
4105 && bitsize == GET_MODE_BITSIZE (mode1))
4107 enum machine_mode opmode = GET_MODE (op0);
4109 if (opmode == VOIDmode)
4110 opmode = TYPE_MODE (TREE_TYPE (tem));
4112 /* This condition may hold if we're expanding the address
4113 right past the end of an array that turned out not to
4114 be addressable (i.e., the address was only computed in
4115 debug stmts). The gen_subreg below would rightfully
4116 crash, and the address doesn't really exist, so just
4117 drop it. */
4118 if (bitpos >= GET_MODE_BITSIZE (opmode))
4119 return NULL;
4121 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4122 return simplify_gen_subreg (mode, op0, opmode,
4123 bitpos / BITS_PER_UNIT);
4126 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4127 && TYPE_UNSIGNED (TREE_TYPE (exp))
4128 ? SIGN_EXTRACT
4129 : ZERO_EXTRACT, mode,
4130 GET_MODE (op0) != VOIDmode
4131 ? GET_MODE (op0)
4132 : TYPE_MODE (TREE_TYPE (tem)),
4133 op0, GEN_INT (bitsize), GEN_INT (bitpos));
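/* For example (sketch only): a 5-bit field starting at bit 3 of a
   byte in memory is described by an extraction rtx of the form
   (*_extract:SI (mem:QI ...) (const_int 5) (const_int 3)), i.e. the
   operands are the containing object, the field width in bits and
   the bit position.  */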
4136 case ABS_EXPR:
4137 return simplify_gen_unary (ABS, mode, op0, mode);
4139 case NEGATE_EXPR:
4140 return simplify_gen_unary (NEG, mode, op0, mode);
4142 case BIT_NOT_EXPR:
4143 return simplify_gen_unary (NOT, mode, op0, mode);
4145 case FLOAT_EXPR:
4146 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4147 0)))
4148 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4149 inner_mode);
4151 case FIX_TRUNC_EXPR:
4152 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4153 inner_mode);
4155 case POINTER_PLUS_EXPR:
4156 /* For the rare target where pointers are not the same size as
4157 size_t, we need to check for mismatched modes and correct
4158 the addend. */
4159 if (op0 && op1
4160 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4161 && GET_MODE (op0) != GET_MODE (op1))
4163 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4164 /* If OP0 is a partial mode, then we must truncate, even if it has
4165 the same bitsize as OP1, as GCC's representation of partial modes
4166 is opaque. */
4167 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4168 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4169 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4170 GET_MODE (op1));
4171 else
4172 /* We always sign-extend, regardless of the signedness of
4173 the operand, because the operand is always unsigned
4174 here even if the original C expression is signed. */
4175 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4176 GET_MODE (op1));
4178 /* Fall through. */
4179 case PLUS_EXPR:
4180 return simplify_gen_binary (PLUS, mode, op0, op1);
4182 case MINUS_EXPR:
4183 return simplify_gen_binary (MINUS, mode, op0, op1);
4185 case MULT_EXPR:
4186 return simplify_gen_binary (MULT, mode, op0, op1);
4188 case RDIV_EXPR:
4189 case TRUNC_DIV_EXPR:
4190 case EXACT_DIV_EXPR:
4191 if (unsignedp)
4192 return simplify_gen_binary (UDIV, mode, op0, op1);
4193 else
4194 return simplify_gen_binary (DIV, mode, op0, op1);
4196 case TRUNC_MOD_EXPR:
4197 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4199 case FLOOR_DIV_EXPR:
4200 if (unsignedp)
4201 return simplify_gen_binary (UDIV, mode, op0, op1);
4202 else
4204 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4205 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4206 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4207 return simplify_gen_binary (PLUS, mode, div, adj);
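/* Worked example (illustrative): -7 FLOOR_DIV 2.  The truncating
   DIV gives -3 with MOD -1; since the remainder is nonzero and has
   the opposite sign from the divisor the adjustment is -1, so the
   result is -3 + -1 = -4 = floor(-3.5).  */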
4210 case FLOOR_MOD_EXPR:
4211 if (unsignedp)
4212 return simplify_gen_binary (UMOD, mode, op0, op1);
4213 else
4215 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4216 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4217 adj = simplify_gen_unary (NEG, mode,
4218 simplify_gen_binary (MULT, mode, adj, op1),
4219 mode);
4220 return simplify_gen_binary (PLUS, mode, mod, adj);
4223 case CEIL_DIV_EXPR:
4224 if (unsignedp)
4226 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4227 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4228 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4229 return simplify_gen_binary (PLUS, mode, div, adj);
4231 else
4233 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4234 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4235 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4236 return simplify_gen_binary (PLUS, mode, div, adj);
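/* Worked example (illustrative): unsigned 7 CEIL_DIV 2.  UDIV gives
   3 with UMOD 1; the remainder is nonzero, so the adjustment is +1
   and the result is 3 + 1 = 4 = ceil(3.5).  */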
4239 case CEIL_MOD_EXPR:
4240 if (unsignedp)
4242 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4243 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4244 adj = simplify_gen_unary (NEG, mode,
4245 simplify_gen_binary (MULT, mode, adj, op1),
4246 mode);
4247 return simplify_gen_binary (PLUS, mode, mod, adj);
4249 else
4251 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4252 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4253 adj = simplify_gen_unary (NEG, mode,
4254 simplify_gen_binary (MULT, mode, adj, op1),
4255 mode);
4256 return simplify_gen_binary (PLUS, mode, mod, adj);
4259 case ROUND_DIV_EXPR:
4260 if (unsignedp)
4262 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4263 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4264 rtx adj = round_udiv_adjust (mode, mod, op1);
4265 return simplify_gen_binary (PLUS, mode, div, adj);
4267 else
4269 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4270 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4271 rtx adj = round_sdiv_adjust (mode, mod, op1);
4272 return simplify_gen_binary (PLUS, mode, div, adj);
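/* Worked example (illustrative): 7 ROUND_DIV 2.  The truncating
   division gives 3 with remainder 1; twice the remainder is at least
   the divisor, so the adjustment is +1 and the result is 4, i.e.
   halfway cases are rounded away from zero.  */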
4275 case ROUND_MOD_EXPR:
4276 if (unsignedp)
4278 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4279 rtx adj = round_udiv_adjust (mode, mod, op1);
4280 adj = simplify_gen_unary (NEG, mode,
4281 simplify_gen_binary (MULT, mode, adj, op1),
4282 mode);
4283 return simplify_gen_binary (PLUS, mode, mod, adj);
4285 else
4287 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4288 rtx adj = round_sdiv_adjust (mode, mod, op1);
4289 adj = simplify_gen_unary (NEG, mode,
4290 simplify_gen_binary (MULT, mode, adj, op1),
4291 mode);
4292 return simplify_gen_binary (PLUS, mode, mod, adj);
4295 case LSHIFT_EXPR:
4296 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4298 case RSHIFT_EXPR:
4299 if (unsignedp)
4300 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4301 else
4302 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4304 case LROTATE_EXPR:
4305 return simplify_gen_binary (ROTATE, mode, op0, op1);
4307 case RROTATE_EXPR:
4308 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4310 case MIN_EXPR:
4311 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4313 case MAX_EXPR:
4314 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4316 case BIT_AND_EXPR:
4317 case TRUTH_AND_EXPR:
4318 return simplify_gen_binary (AND, mode, op0, op1);
4320 case BIT_IOR_EXPR:
4321 case TRUTH_OR_EXPR:
4322 return simplify_gen_binary (IOR, mode, op0, op1);
4324 case BIT_XOR_EXPR:
4325 case TRUTH_XOR_EXPR:
4326 return simplify_gen_binary (XOR, mode, op0, op1);
4328 case TRUTH_ANDIF_EXPR:
4329 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4331 case TRUTH_ORIF_EXPR:
4332 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4334 case TRUTH_NOT_EXPR:
4335 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4337 case LT_EXPR:
4338 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4339 op0, op1);
4341 case LE_EXPR:
4342 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4343 op0, op1);
4345 case GT_EXPR:
4346 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4347 op0, op1);
4349 case GE_EXPR:
4350 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4351 op0, op1);
4353 case EQ_EXPR:
4354 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4356 case NE_EXPR:
4357 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4359 case UNORDERED_EXPR:
4360 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4362 case ORDERED_EXPR:
4363 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4365 case UNLT_EXPR:
4366 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4368 case UNLE_EXPR:
4369 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4371 case UNGT_EXPR:
4372 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4374 case UNGE_EXPR:
4375 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4377 case UNEQ_EXPR:
4378 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4380 case LTGT_EXPR:
4381 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4383 case COND_EXPR:
4384 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4386 case COMPLEX_EXPR:
4387 gcc_assert (COMPLEX_MODE_P (mode));
4388 if (GET_MODE (op0) == VOIDmode)
4389 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4390 if (GET_MODE (op1) == VOIDmode)
4391 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4392 return gen_rtx_CONCAT (mode, op0, op1);
4394 case CONJ_EXPR:
4395 if (GET_CODE (op0) == CONCAT)
4396 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4397 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4398 XEXP (op0, 1),
4399 GET_MODE_INNER (mode)));
4400 else
4402 enum machine_mode imode = GET_MODE_INNER (mode);
4403 rtx re, im;
4405 if (MEM_P (op0))
4407 re = adjust_address_nv (op0, imode, 0);
4408 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4410 else
4412 enum machine_mode ifmode = int_mode_for_mode (mode);
4413 enum machine_mode ihmode = int_mode_for_mode (imode);
4414 rtx halfsize;
4415 if (ifmode == BLKmode || ihmode == BLKmode)
4416 return NULL;
4417 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4418 re = op0;
4419 if (mode != ifmode)
4420 re = gen_rtx_SUBREG (ifmode, re, 0);
4421 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4422 if (imode != ihmode)
4423 re = gen_rtx_SUBREG (imode, re, 0);
4424 im = copy_rtx (op0);
4425 if (mode != ifmode)
4426 im = gen_rtx_SUBREG (ifmode, im, 0);
4427 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4428 if (imode != ihmode)
4429 im = gen_rtx_SUBREG (imode, im, 0);
4431 im = gen_rtx_NEG (imode, im);
4432 return gen_rtx_CONCAT (mode, re, im);
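/* Sketch (illustrative only): for a complex SCmode value the
   conjugate ends up as (concat:SC <re> (neg:SF <im>)); the real part
   is reused unchanged and only the imaginary half is negated.  When
   OP0 lives in memory the two halves are simply adjacent SFmode
   accesses.  */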
4435 case ADDR_EXPR:
4436 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4437 if (!op0 || !MEM_P (op0))
4439 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4440 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4441 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4442 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4443 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4444 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4446 if (handled_component_p (TREE_OPERAND (exp, 0)))
4448 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4449 tree decl
4450 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4451 &bitoffset, &bitsize, &maxsize);
4452 if ((TREE_CODE (decl) == VAR_DECL
4453 || TREE_CODE (decl) == PARM_DECL
4454 || TREE_CODE (decl) == RESULT_DECL)
4455 && (!TREE_ADDRESSABLE (decl)
4456 || target_for_debug_bind (decl))
4457 && (bitoffset % BITS_PER_UNIT) == 0
4458 && bitsize > 0
4459 && bitsize == maxsize)
4461 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4462 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4466 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4467 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4468 == ADDR_EXPR)
4470 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4471 0));
4472 if (op0 != NULL
4473 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4474 || (GET_CODE (op0) == PLUS
4475 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4476 && CONST_INT_P (XEXP (op0, 1)))))
4478 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4479 1));
4480 if (!op1 || !CONST_INT_P (op1))
4481 return NULL;
4483 return plus_constant (mode, op0, INTVAL (op1));
4487 return NULL;
4490 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4491 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4493 return op0;
4495 case VECTOR_CST:
4497 unsigned i;
4499 op0 = gen_rtx_CONCATN
4500 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4502 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4504 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4505 if (!op1)
4506 return NULL;
4507 XVECEXP (op0, 0, i) = op1;
4510 return op0;
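/* Rough example (illustrative): the V4SI constant { 1, 2, 3, 4 }
   becomes (concatn:V4SI [(const_int 1) (const_int 2) (const_int 3)
   (const_int 4)]), one expanded element per vector lane.  */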
4513 case CONSTRUCTOR:
4514 if (TREE_CLOBBER_P (exp))
4515 return NULL;
4516 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4518 unsigned i;
4519 tree val;
4521 op0 = gen_rtx_CONCATN
4522 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4524 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4526 op1 = expand_debug_expr (val);
4527 if (!op1)
4528 return NULL;
4529 XVECEXP (op0, 0, i) = op1;
4532 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4534 op1 = expand_debug_expr
4535 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4537 if (!op1)
4538 return NULL;
4540 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4541 XVECEXP (op0, 0, i) = op1;
4544 return op0;
4546 else
4547 goto flag_unsupported;
4549 case CALL_EXPR:
4550 /* ??? Maybe handle some builtins? */
4551 return NULL;
4553 case SSA_NAME:
4555 gimple g = get_gimple_for_ssa_name (exp);
4556 if (g)
4558 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
4559 if (!op0)
4560 return NULL;
4562 else
4564 int part = var_to_partition (SA.map, exp);
4566 if (part == NO_PARTITION)
4568 /* If this is a reference to the incoming value of a parameter
4569 that is never used in the code, or whose incoming value is
4570 never used in the code, use the PARM_DECL's
4571 DECL_RTL if set. */
4572 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4573 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4575 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4576 if (op0)
4577 goto adjust_mode;
4578 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4579 if (op0)
4580 goto adjust_mode;
4582 return NULL;
4585 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4587 op0 = copy_rtx (SA.partition_to_pseudo[part]);
4589 goto adjust_mode;
4592 case ERROR_MARK:
4593 return NULL;
4595 /* Vector stuff.  For most of these tree codes there are no corresponding rtl codes. */
4596 case REALIGN_LOAD_EXPR:
4597 case REDUC_MAX_EXPR:
4598 case REDUC_MIN_EXPR:
4599 case REDUC_PLUS_EXPR:
4600 case VEC_COND_EXPR:
4601 case VEC_PACK_FIX_TRUNC_EXPR:
4602 case VEC_PACK_SAT_EXPR:
4603 case VEC_PACK_TRUNC_EXPR:
4604 case VEC_RSHIFT_EXPR:
4605 case VEC_UNPACK_FLOAT_HI_EXPR:
4606 case VEC_UNPACK_FLOAT_LO_EXPR:
4607 case VEC_UNPACK_HI_EXPR:
4608 case VEC_UNPACK_LO_EXPR:
4609 case VEC_WIDEN_MULT_HI_EXPR:
4610 case VEC_WIDEN_MULT_LO_EXPR:
4611 case VEC_WIDEN_MULT_EVEN_EXPR:
4612 case VEC_WIDEN_MULT_ODD_EXPR:
4613 case VEC_WIDEN_LSHIFT_HI_EXPR:
4614 case VEC_WIDEN_LSHIFT_LO_EXPR:
4615 case VEC_PERM_EXPR:
4616 return NULL;
4618 /* Misc codes. */
4619 case ADDR_SPACE_CONVERT_EXPR:
4620 case FIXED_CONVERT_EXPR:
4621 case OBJ_TYPE_REF:
4622 case WITH_SIZE_EXPR:
4623 return NULL;
4625 case DOT_PROD_EXPR:
4626 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4627 && SCALAR_INT_MODE_P (mode))
4630 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4631 0)))
4632 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4633 inner_mode);
4635 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4636 1)))
4637 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4638 inner_mode);
4639 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4640 return simplify_gen_binary (PLUS, mode, op0, op2);
4642 return NULL;
4644 case WIDEN_MULT_EXPR:
4645 case WIDEN_MULT_PLUS_EXPR:
4646 case WIDEN_MULT_MINUS_EXPR:
4647 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4648 && SCALAR_INT_MODE_P (mode))
4650 inner_mode = GET_MODE (op0);
4651 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4652 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4653 else
4654 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4655 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4656 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4657 else
4658 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4659 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4660 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4661 return op0;
4662 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4663 return simplify_gen_binary (PLUS, mode, op0, op2);
4664 else
4665 return simplify_gen_binary (MINUS, mode, op2, op0);
4667 return NULL;
4669 case MULT_HIGHPART_EXPR:
4670 /* ??? Similar to the above. */
4671 return NULL;
4673 case WIDEN_SUM_EXPR:
4674 case WIDEN_LSHIFT_EXPR:
4675 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4676 && SCALAR_INT_MODE_P (mode))
4679 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4680 0)))
4681 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4682 inner_mode);
4683 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4684 ? ASHIFT : PLUS, mode, op0, op1);
4686 return NULL;
4688 case FMA_EXPR:
4689 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4691 default:
4692 flag_unsupported:
4693 #ifdef ENABLE_CHECKING
4694 debug_tree (exp);
4695 gcc_unreachable ();
4696 #else
4697 return NULL;
4698 #endif
4702 /* Return an RTX equivalent to the source bind value of the tree expression
4703 EXP. */
4705 static rtx
4706 expand_debug_source_expr (tree exp)
4708 rtx op0 = NULL_RTX;
4709 enum machine_mode mode = VOIDmode, inner_mode;
4711 switch (TREE_CODE (exp))
4713 case PARM_DECL:
4715 mode = DECL_MODE (exp);
4716 op0 = expand_debug_parm_decl (exp);
4717 if (op0)
4718 break;
4719 /* See whether this is an argument that has been completely
4720 optimized out. */
4721 if (!DECL_RTL_SET_P (exp)
4722 && !DECL_INCOMING_RTL (exp)
4723 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4725 tree aexp = DECL_ORIGIN (exp);
4726 if (DECL_CONTEXT (aexp)
4727 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4729 vec<tree, va_gc> **debug_args;
4730 unsigned int ix;
4731 tree ddecl;
4732 debug_args = decl_debug_args_lookup (current_function_decl);
4733 if (debug_args != NULL)
4735 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
4736 ix += 2)
4737 if (ddecl == aexp)
4738 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4742 break;
4744 default:
4745 break;
4748 if (op0 == NULL_RTX)
4749 return NULL_RTX;
4751 inner_mode = GET_MODE (op0);
4752 if (mode == inner_mode)
4753 return op0;
4755 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4757 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4758 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4759 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4760 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4761 else
4762 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4764 else if (FLOAT_MODE_P (mode))
4765 gcc_unreachable ();
4766 else if (FLOAT_MODE_P (inner_mode))
4768 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4769 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4770 else
4771 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4773 else if (CONSTANT_P (op0)
4774 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4775 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4776 subreg_lowpart_offset (mode, inner_mode));
4777 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4778 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4779 else
4780 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4782 return op0;
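/* Sketch (illustrative only): for a PARM_DECL whose incoming value
   was completely optimized away but which is recorded among the debug
   arguments of the abstract origin, the function above yields a
   (debug_parameter_ref:<mode> <parm>) rtx; the caller then uses it in
   a VAR_LOCATION with VAR_INIT_STATUS_UNINITIALIZED.  */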
4785 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
4786 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4787 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
4789 static void
4790 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
4792 rtx exp = *exp_p;
4794 if (exp == NULL_RTX)
4795 return;
4797 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4798 return;
4800 if (depth == 4)
4802 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4803 rtx dval = make_debug_expr_from_rtl (exp);
4805 /* Emit a debug bind insn before INSN. */
4806 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4807 DEBUG_EXPR_TREE_DECL (dval), exp,
4808 VAR_INIT_STATUS_INITIALIZED);
4810 emit_debug_insn_before (bind, insn);
4811 *exp_p = dval;
4812 return;
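      /* Illustrative sketch: when a location expression contains
         arithmetic nested more than four levels deep, the compound
         subexpression reached at depth 4 is pulled out into a fresh
         DEBUG_EXPR D#n, a debug bind of D#n to that subexpression is
         emitted before INSN, and the original location then refers to
         (debug_expr D#n) in its place.  */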
4815 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
4816 int i, j;
4817 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
4818 switch (*format_ptr++)
4820 case 'e':
4821 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
4822 break;
4824 case 'E':
4825 case 'V':
4826 for (j = 0; j < XVECLEN (exp, i); j++)
4827 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
4828 break;
4830 default:
4831 break;
4835 /* Expand the _LOCs in debug insns. We run this after expanding all
4836 regular insns, so that any variables referenced in the function
4837 will have their DECL_RTLs set. */
4839 static void
4840 expand_debug_locations (void)
4842 rtx_insn *insn;
4843 rtx_insn *last = get_last_insn ();
4844 int save_strict_alias = flag_strict_aliasing;
4846 /* New alias sets while setting up memory attributes cause
4847 -fcompare-debug failures, even though they don't bring about any
4848 codegen changes. */
4849 flag_strict_aliasing = 0;
4851 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4852 if (DEBUG_INSN_P (insn))
4854 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
4855 rtx val;
4856 rtx_insn *prev_insn, *insn2;
4857 enum machine_mode mode;
4859 if (value == NULL_TREE)
4860 val = NULL_RTX;
4861 else
4863 if (INSN_VAR_LOCATION_STATUS (insn)
4864 == VAR_INIT_STATUS_UNINITIALIZED)
4865 val = expand_debug_source_expr (value);
4866 else
4867 val = expand_debug_expr (value);
4868 gcc_assert (last == get_last_insn ());
4871 if (!val)
4872 val = gen_rtx_UNKNOWN_VAR_LOC ();
4873 else
4875 mode = GET_MODE (INSN_VAR_LOCATION (insn));
4877 gcc_assert (mode == GET_MODE (val)
4878 || (GET_MODE (val) == VOIDmode
4879 && (CONST_SCALAR_INT_P (val)
4880 || GET_CODE (val) == CONST_FIXED
4881 || GET_CODE (val) == LABEL_REF)));
4884 INSN_VAR_LOCATION_LOC (insn) = val;
4885 prev_insn = PREV_INSN (insn);
4886 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
4887 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
4890 flag_strict_aliasing = save_strict_alias;
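/* For instance (sketch only): a debug bind whose value is the gimple
   expression x_1 + 1 ends up as a VAR_LOCATION holding something like
   (plus:SI (reg:SI <x's pseudo>) (const_int 1)), while a value that
   cannot be expanded is replaced by gen_rtx_UNKNOWN_VAR_LOC so the
   variable is simply marked unavailable at that point.  */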
4893 /* Expand basic block BB from GIMPLE trees to RTL. */
4895 static basic_block
4896 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
4898 gimple_stmt_iterator gsi;
4899 gimple_seq stmts;
4900 gimple stmt = NULL;
4901 rtx_note *note;
4902 rtx_insn *last;
4903 edge e;
4904 edge_iterator ei;
4906 if (dump_file)
4907 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
4908 bb->index);
4910 /* Note that since we are now transitioning from GIMPLE to RTL, we
4911 cannot use the gsi_*_bb() routines because they expect the basic
4912 block to be in GIMPLE, instead of RTL. Therefore, we need to
4913 access the BB sequence directly. */
4914 stmts = bb_seq (bb);
4915 bb->il.gimple.seq = NULL;
4916 bb->il.gimple.phi_nodes = NULL;
4917 rtl_profile_for_bb (bb);
4918 init_rtl_bb_info (bb);
4919 bb->flags |= BB_RTL;
4921 /* Remove the RETURN_EXPR if we may fall through to the exit
4922 instead. */
4923 gsi = gsi_last (stmts);
4924 if (!gsi_end_p (gsi)
4925 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
4927 gimple ret_stmt = gsi_stmt (gsi);
4929 gcc_assert (single_succ_p (bb));
4930 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
4932 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
4933 && !gimple_return_retval (ret_stmt))
4935 gsi_remove (&gsi, false);
4936 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
4940 gsi = gsi_start (stmts);
4941 if (!gsi_end_p (gsi))
4943 stmt = gsi_stmt (gsi);
4944 if (gimple_code (stmt) != GIMPLE_LABEL)
4945 stmt = NULL;
4948 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
4950 if (stmt || elt)
4952 last = get_last_insn ();
4954 if (stmt)
4956 expand_gimple_stmt (stmt);
4957 gsi_next (&gsi);
4960 if (elt)
4961 emit_label (*elt);
4963 /* Java emits line number notes at the top of labels.
4964 ??? Make this go away once line number notes are obsoleted. */
4965 BB_HEAD (bb) = NEXT_INSN (last);
4966 if (NOTE_P (BB_HEAD (bb)))
4967 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
4968 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
4970 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4972 else
4973 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
4975 NOTE_BASIC_BLOCK (note) = bb;
4977 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4979 basic_block new_bb;
4981 stmt = gsi_stmt (gsi);
4983 /* If this statement is a non-debug one, and we generate debug
4984 insns, then this one might be the last real use of a TERed
4985 SSA_NAME while there are still some debug uses further
4986 down. Expanding the current SSA name in such further debug
4987 uses by their RHS might lead to wrong debug info, as coalescing
4988 might make the operands of such RHS be placed into the same
4989 pseudo as something else. Like so:
4990 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
4991 use(a_1);
4992 a_2 = ...
4993 #DEBUG ... => a_1
4994 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
4995 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
4996 the write to a_2 would actually have clobbered the place which
4997 formerly held a_0.
4999 So, instead of that, we recognize the situation, and generate
5000 debug temporaries at the last real use of TERed SSA names:
5001 a_1 = a_0 + 1;
5002 #DEBUG #D1 => a_1
5003 use(a_1);
5004 a_2 = ...
5005 #DEBUG ... => #D1
5007 if (MAY_HAVE_DEBUG_INSNS
5008 && SA.values
5009 && !is_gimple_debug (stmt))
5011 ssa_op_iter iter;
5012 tree op;
5013 gimple def;
5015 location_t sloc = curr_insn_location ();
5017 /* Look for SSA names that have their last use here (TERed
5018 names always have only one real use). */
5019 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5020 if ((def = get_gimple_for_ssa_name (op)))
5022 imm_use_iterator imm_iter;
5023 use_operand_p use_p;
5024 bool have_debug_uses = false;
5026 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5028 if (gimple_debug_bind_p (USE_STMT (use_p)))
5030 have_debug_uses = true;
5031 break;
5035 if (have_debug_uses)
5037 /* OP is a TERed SSA name, with DEF its defining
5038 statement, and where OP is used in further debug
5039 instructions. Generate a debug temporary, and
5040 replace all uses of OP in debug insns with that
5041 temporary. */
5042 gimple debugstmt;
5043 tree value = gimple_assign_rhs_to_tree (def);
5044 tree vexpr = make_node (DEBUG_EXPR_DECL);
5045 rtx val;
5046 enum machine_mode mode;
5048 set_curr_insn_location (gimple_location (def));
5050 DECL_ARTIFICIAL (vexpr) = 1;
5051 TREE_TYPE (vexpr) = TREE_TYPE (value);
5052 if (DECL_P (value))
5053 mode = DECL_MODE (value);
5054 else
5055 mode = TYPE_MODE (TREE_TYPE (value));
5056 DECL_MODE (vexpr) = mode;
5058 val = gen_rtx_VAR_LOCATION
5059 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5061 emit_debug_insn (val);
5063 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5065 if (!gimple_debug_bind_p (debugstmt))
5066 continue;
5068 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5069 SET_USE (use_p, vexpr);
5071 update_stmt (debugstmt);
5075 set_curr_insn_location (sloc);
5078 currently_expanding_gimple_stmt = stmt;
5080 /* Expand this statement, then evaluate the resulting RTL and
5081 fixup the CFG accordingly. */
5082 if (gimple_code (stmt) == GIMPLE_COND)
5084 new_bb = expand_gimple_cond (bb, stmt);
5085 if (new_bb)
5086 return new_bb;
5088 else if (gimple_debug_bind_p (stmt))
5090 location_t sloc = curr_insn_location ();
5091 gimple_stmt_iterator nsi = gsi;
5093 for (;;)
5095 tree var = gimple_debug_bind_get_var (stmt);
5096 tree value;
5097 rtx val;
5098 enum machine_mode mode;
5100 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5101 && TREE_CODE (var) != LABEL_DECL
5102 && !target_for_debug_bind (var))
5103 goto delink_debug_stmt;
5105 if (gimple_debug_bind_has_value_p (stmt))
5106 value = gimple_debug_bind_get_value (stmt);
5107 else
5108 value = NULL_TREE;
5110 last = get_last_insn ();
5112 set_curr_insn_location (gimple_location (stmt));
5114 if (DECL_P (var))
5115 mode = DECL_MODE (var);
5116 else
5117 mode = TYPE_MODE (TREE_TYPE (var));
5119 val = gen_rtx_VAR_LOCATION
5120 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5122 emit_debug_insn (val);
5124 if (dump_file && (dump_flags & TDF_DETAILS))
5126 /* We can't dump the insn with a TREE where an RTX
5127 is expected. */
5128 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5129 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5130 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5133 delink_debug_stmt:
5134 /* In order not to generate too many debug temporaries,
5135 we delink all uses of debug statements we already expanded.
5136 Therefore debug statements between definition and real
5137 use of TERed SSA names will continue to use the SSA name,
5138 and not be replaced with debug temps. */
5139 delink_stmt_imm_use (stmt);
5141 gsi = nsi;
5142 gsi_next (&nsi);
5143 if (gsi_end_p (nsi))
5144 break;
5145 stmt = gsi_stmt (nsi);
5146 if (!gimple_debug_bind_p (stmt))
5147 break;
5150 set_curr_insn_location (sloc);
5152 else if (gimple_debug_source_bind_p (stmt))
5154 location_t sloc = curr_insn_location ();
5155 tree var = gimple_debug_source_bind_get_var (stmt);
5156 tree value = gimple_debug_source_bind_get_value (stmt);
5157 rtx val;
5158 enum machine_mode mode;
5160 last = get_last_insn ();
5162 set_curr_insn_location (gimple_location (stmt));
5164 mode = DECL_MODE (var);
5166 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5167 VAR_INIT_STATUS_UNINITIALIZED);
5169 emit_debug_insn (val);
5171 if (dump_file && (dump_flags & TDF_DETAILS))
5173 /* We can't dump the insn with a TREE where an RTX
5174 is expected. */
5175 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5176 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5177 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5180 set_curr_insn_location (sloc);
5182 else
5184 if (is_gimple_call (stmt)
5185 && gimple_call_tail_p (stmt)
5186 && disable_tail_calls)
5187 gimple_call_set_tail (stmt, false);
5189 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
5191 bool can_fallthru;
5192 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
5193 if (new_bb)
5195 if (can_fallthru)
5196 bb = new_bb;
5197 else
5198 return new_bb;
5201 else
5203 def_operand_p def_p;
5204 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5206 if (def_p != NULL)
5208 /* Ignore this stmt if it is in the list of
5209 replaceable expressions. */
5210 if (SA.values
5211 && bitmap_bit_p (SA.values,
5212 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5213 continue;
5215 last = expand_gimple_stmt (stmt);
5216 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5221 currently_expanding_gimple_stmt = NULL;
5223 /* Expand implicit goto and convert goto_locus. */
5224 FOR_EACH_EDGE (e, ei, bb->succs)
5226 if (e->goto_locus != UNKNOWN_LOCATION)
5227 set_curr_insn_location (e->goto_locus);
5228 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5230 emit_jump (label_rtx_for_bb (e->dest));
5231 e->flags &= ~EDGE_FALLTHRU;
5235 /* Expanded RTL can create a jump in the last instruction of the block.
5236 This jump might later be assumed to be a jump to the successor and break edge insertion.
5237 We need to insert a dummy move to prevent this.  PR41440. */
5238 if (single_succ_p (bb)
5239 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5240 && (last = get_last_insn ())
5241 && JUMP_P (last))
5243 rtx dummy = gen_reg_rtx (SImode);
5244 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
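      /* The dummy amounts to a no-op self-move, roughly
         (insn (set (reg:SI <new pseudo>) (reg:SI <same pseudo>))),
         emitted after the jump so that the block no longer ends in a
         jump and later edge insertions have a safe place to attach
         code.  */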
5247 do_pending_stack_adjust ();
5249 /* Find the block tail. The last insn in the block is the insn
5250 before a barrier and/or table jump insn. */
5251 last = get_last_insn ();
5252 if (BARRIER_P (last))
5253 last = PREV_INSN (last);
5254 if (JUMP_TABLE_DATA_P (last))
5255 last = PREV_INSN (PREV_INSN (last));
5256 BB_END (bb) = last;
5258 update_bb_for_insn (bb);
5260 return bb;
5264 /* Create a basic block for initialization code. */
5266 static basic_block
5267 construct_init_block (void)
5269 basic_block init_block, first_block;
5270 edge e = NULL;
5271 int flags;
5273 /* Multiple entry points not supported yet. */
5274 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5275 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5276 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5277 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5278 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5280 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5282 /* When the entry edge points to the first basic block, we don't need a jump;
5283 otherwise we have to jump to the proper target. */
5284 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5286 tree label = gimple_block_label (e->dest);
5288 emit_jump (label_rtx (label));
5289 flags = 0;
5291 else
5292 flags = EDGE_FALLTHRU;
5294 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5295 get_last_insn (),
5296 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5297 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5298 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5299 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5300 if (e)
5302 first_block = e->dest;
5303 redirect_edge_succ (e, init_block);
5304 e = make_edge (init_block, first_block, flags);
5306 else
5307 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5308 e->probability = REG_BR_PROB_BASE;
5309 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5311 update_bb_for_insn (init_block);
5312 return init_block;
5315 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5316 found in the block tree. */
5318 static void
5319 set_block_levels (tree block, int level)
5321 while (block)
5323 BLOCK_NUMBER (block) = level;
5324 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5325 block = BLOCK_CHAIN (block);
5329 /* Create a block containing landing pads and similar stuff. */
5331 static void
5332 construct_exit_block (void)
5334 rtx_insn *head = get_last_insn ();
5335 rtx_insn *end;
5336 basic_block exit_block;
5337 edge e, e2;
5338 unsigned ix;
5339 edge_iterator ei;
5340 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5341 rtx_insn *orig_end = BB_END (prev_bb);
5343 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5345 /* Make sure the locus is set to the end of the function, so that
5346 epilogue line numbers and warnings are set properly. */
5347 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5348 input_location = cfun->function_end_locus;
5350 /* Generate rtl for function exit. */
5351 expand_function_end ();
5353 end = get_last_insn ();
5354 if (head == end)
5355 return;
5356 /* While emitting the function end we could move the end of the last basic
5357 block. */
5358 BB_END (prev_bb) = orig_end;
5359 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5360 head = NEXT_INSN (head);
5361 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5362 bb frequency counting will be confused. Any instructions before that
5363 label are emitted for the case where PREV_BB falls through into the
5364 exit block, so append those instructions to prev_bb in that case. */
5365 if (NEXT_INSN (head) != return_label)
5367 while (NEXT_INSN (head) != return_label)
5369 if (!NOTE_P (NEXT_INSN (head)))
5370 BB_END (prev_bb) = NEXT_INSN (head);
5371 head = NEXT_INSN (head);
5374 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5375 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5376 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5377 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5379 ix = 0;
5380 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5382 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5383 if (!(e->flags & EDGE_ABNORMAL))
5384 redirect_edge_succ (e, exit_block);
5385 else
5386 ix++;
5389 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5390 e->probability = REG_BR_PROB_BASE;
5391 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5392 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5393 if (e2 != e)
5395 e->count -= e2->count;
5396 exit_block->count -= e2->count;
5397 exit_block->frequency -= EDGE_FREQUENCY (e2);
5399 if (e->count < 0)
5400 e->count = 0;
5401 if (exit_block->count < 0)
5402 exit_block->count = 0;
5403 if (exit_block->frequency < 0)
5404 exit_block->frequency = 0;
5405 update_bb_for_insn (exit_block);
5408 /* Helper function for discover_nonconstant_array_refs.
5409 Look for ARRAY_REF nodes with non-constant indexes and mark them
5410 addressable. */
5412 static tree
5413 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5414 void *data ATTRIBUTE_UNUSED)
5416 tree t = *tp;
5418 if (IS_TYPE_OR_DECL_P (t))
5419 *walk_subtrees = 0;
5420 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5422 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5423 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5424 && (!TREE_OPERAND (t, 2)
5425 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5426 || (TREE_CODE (t) == COMPONENT_REF
5427 && (!TREE_OPERAND (t,2)
5428 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5429 || TREE_CODE (t) == BIT_FIELD_REF
5430 || TREE_CODE (t) == REALPART_EXPR
5431 || TREE_CODE (t) == IMAGPART_EXPR
5432 || TREE_CODE (t) == VIEW_CONVERT_EXPR
5433 || CONVERT_EXPR_P (t))
5434 t = TREE_OPERAND (t, 0);
5436 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5438 t = get_base_address (t);
5439 if (t && DECL_P (t)
5440 && DECL_MODE (t) != BLKmode)
5441 TREE_ADDRESSABLE (t) = 1;
5444 *walk_subtrees = 0;
5447 return NULL_TREE;
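/* Example (illustrative): given  int a[2];  ... = a[i];  with a
   variable index i, the walk above reaches the ARRAY_REF, strips the
   handled components, finds the base decl and sets TREE_ADDRESSABLE
   on it, so the small array is given a stack slot instead of being
   kept in a single register.  */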
5450 /* RTL expansion is not able to compile array references with variable
5451 offsets for arrays stored in a single register.  Discover such
5452 expressions and mark variables as addressable to avoid this
5453 scenario. */
5455 static void
5456 discover_nonconstant_array_refs (void)
5458 basic_block bb;
5459 gimple_stmt_iterator gsi;
5461 FOR_EACH_BB_FN (bb, cfun)
5462 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5464 gimple stmt = gsi_stmt (gsi);
5465 if (!is_gimple_debug (stmt))
5466 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
5470 /* This function sets crtl->args.internal_arg_pointer to a virtual
5471 register if DRAP is needed. Local register allocator will replace
5472 virtual_incoming_args_rtx with the virtual register. */
5474 static void
5475 expand_stack_alignment (void)
5477 rtx drap_rtx;
5478 unsigned int preferred_stack_boundary;
5480 if (! SUPPORTS_STACK_ALIGNMENT)
5481 return;
5483 if (cfun->calls_alloca
5484 || cfun->has_nonlocal_label
5485 || crtl->has_nonlocal_goto)
5486 crtl->need_drap = true;
5488 /* Call update_stack_boundary here again to update incoming stack
5489 boundary. It may set incoming stack alignment to a different
5490 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5491 use the minimum incoming stack alignment to check if it is OK
5492 to perform sibcall optimization since sibcall optimization will
5493 only align the outgoing stack to incoming stack boundary. */
5494 if (targetm.calls.update_stack_boundary)
5495 targetm.calls.update_stack_boundary ();
5497 /* The incoming stack frame has to be aligned at least at
5498 parm_stack_boundary. */
5499 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5501 /* Update crtl->stack_alignment_estimated and use it later to align
5502 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5503 exceptions since callgraph doesn't collect incoming stack alignment
5504 in this case. */
5505 if (cfun->can_throw_non_call_exceptions
5506 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5507 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5508 else
5509 preferred_stack_boundary = crtl->preferred_stack_boundary;
5510 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5511 crtl->stack_alignment_estimated = preferred_stack_boundary;
5512 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5513 crtl->stack_alignment_needed = preferred_stack_boundary;
5515 gcc_assert (crtl->stack_alignment_needed
5516 <= crtl->stack_alignment_estimated);
5518 crtl->stack_realign_needed
5519 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5520 crtl->stack_realign_tried = crtl->stack_realign_needed;
5522 crtl->stack_realign_processed = true;
5524 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5525 alignment. */
5526 gcc_assert (targetm.calls.get_drap_rtx != NULL);
5527 drap_rtx = targetm.calls.get_drap_rtx ();
5529 /* stack_realign_drap and drap_rtx must match. */
5530 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5532 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5533 if (NULL != drap_rtx)
5535 crtl->args.internal_arg_pointer = drap_rtx;
5537 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5538 needed. */
5539 fixup_tail_calls ();
5544 static void
5545 expand_main_function (void)
5547 #if (defined(INVOKE__main) \
5548 || (!defined(HAS_INIT_SECTION) \
5549 && !defined(INIT_SECTION_ASM_OP) \
5550 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5551 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5552 #endif
5556 /* Expand code to initialize the stack_protect_guard. This is invoked at
5557 the beginning of a function to be protected. */
5559 #ifndef HAVE_stack_protect_set
5560 # define HAVE_stack_protect_set 0
5561 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5562 #endif
5564 static void
5565 stack_protect_prologue (void)
5567 tree guard_decl = targetm.stack_protect_guard ();
5568 rtx x, y;
5570 x = expand_normal (crtl->stack_protect_guard);
5571 y = expand_normal (guard_decl);
5573 /* Allow the target to copy from Y to X without leaking Y into a
5574 register. */
5575 if (HAVE_stack_protect_set)
5577 rtx insn = gen_stack_protect_set (x, y);
5578 if (insn)
5580 emit_insn (insn);
5581 return;
5585 /* Otherwise do a straight move. */
5586 emit_move_insn (x, y);
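/* Sketch (illustrative only): on a target without a
   stack_protect_set pattern this boils down to a plain copy such as
   (set (mem:DI <canary slot>) (mem:DI <guard variable>)), whereas a
   target-provided pattern can perform the copy without leaving the
   guard value live in a scratch register.  */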
5589 /* Translate the intermediate representation contained in the CFG
5590 from GIMPLE trees to RTL.
5592 We do conversion per basic block and preserve/update the tree CFG.
5593 This implies we have to do some magic as the CFG can simultaneously
5594 consist of basic blocks containing RTL and GIMPLE trees. This can
5595 confuse the CFG hooks, so be careful to not manipulate CFG during
5596 the expansion. */
5598 namespace {
5600 const pass_data pass_data_expand =
5602 RTL_PASS, /* type */
5603 "expand", /* name */
5604 OPTGROUP_NONE, /* optinfo_flags */
5605 TV_EXPAND, /* tv_id */
5606 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5607 | PROP_gimple_lcx
5608 | PROP_gimple_lvec ), /* properties_required */
5609 PROP_rtl, /* properties_provided */
5610 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5611 0, /* todo_flags_start */
5612 0, /* todo_flags_finish */
5615 class pass_expand : public rtl_opt_pass
5617 public:
5618 pass_expand (gcc::context *ctxt)
5619 : rtl_opt_pass (pass_data_expand, ctxt)
5622 /* opt_pass methods: */
5623 virtual unsigned int execute (function *);
5625 }; // class pass_expand
5627 unsigned int
5628 pass_expand::execute (function *fun)
5630 basic_block bb, init_block;
5631 sbitmap blocks;
5632 edge_iterator ei;
5633 edge e;
5634 rtx_insn *var_seq, *var_ret_seq;
5635 unsigned i;
5637 timevar_push (TV_OUT_OF_SSA);
5638 rewrite_out_of_ssa (&SA);
5639 timevar_pop (TV_OUT_OF_SSA);
5640 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
5642 /* Make sure all values used by the optimization passes have sane
5643 defaults. */
5644 reg_renumber = 0;
5646 /* Some backends want to know that we are expanding to RTL. */
5647 currently_expanding_to_rtl = 1;
5648 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5649 free_dominance_info (CDI_DOMINATORS);
5651 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
5653 insn_locations_init ();
5654 if (!DECL_IS_BUILTIN (current_function_decl))
5656 /* Eventually, all FEs should explicitly set function_start_locus. */
5657 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
5658 set_curr_insn_location
5659 (DECL_SOURCE_LOCATION (current_function_decl));
5660 else
5661 set_curr_insn_location (fun->function_start_locus);
5663 else
5664 set_curr_insn_location (UNKNOWN_LOCATION);
5665 prologue_location = curr_insn_location ();
5667 #ifdef INSN_SCHEDULING
5668 init_sched_attrs ();
5669 #endif
5671 /* Make sure first insn is a note even if we don't want linenums.
5672 This makes sure the first insn will never be deleted.
5673 Also, final expects a note to appear there. */
5674 emit_note (NOTE_INSN_DELETED);
5676 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5677 discover_nonconstant_array_refs ();
5679 targetm.expand_to_rtl_hook ();
5680 crtl->stack_alignment_needed = STACK_BOUNDARY;
5681 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
5682 crtl->stack_alignment_estimated = 0;
5683 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5684 fun->cfg->max_jumptable_ents = 0;
5686 /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
5687 of the function section at expansion time to predict the distance of calls. */
5688 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5690 /* Expand the variables recorded during gimple lowering. */
5691 timevar_push (TV_VAR_EXPAND);
5692 start_sequence ();
5694 var_ret_seq = expand_used_vars ();
5696 var_seq = get_insns ();
5697 end_sequence ();
5698 timevar_pop (TV_VAR_EXPAND);
5700 /* Honor stack protection warnings. */
5701 if (warn_stack_protect)
5703 if (fun->calls_alloca)
5704 warning (OPT_Wstack_protector,
5705 "stack protector not protecting local variables: "
5706 "variable length buffer");
5707 if (has_short_buffer && !crtl->stack_protect_guard)
5708 warning (OPT_Wstack_protector,
5709 "stack protector not protecting function: "
5710 "all local arrays are less than %d bytes long",
5711 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
5714 /* Set up parameters and prepare for return, for the function. */
5715 expand_function_start (current_function_decl);
5717 /* If we emitted any instructions for setting up the variables,
5718 emit them before the FUNCTION_START note. */
5719 if (var_seq)
5721 emit_insn_before (var_seq, parm_birth_insn);
5723 /* In expand_function_end we'll insert the alloca save/restore
5724 before parm_birth_insn.  We've just inserted an alloca call.
5725 Adjust the pointer to match. */
5726 parm_birth_insn = var_seq;
5729 /* Now that we also have the parameter RTXs, copy them over to our
5730 partitions. */
5731 for (i = 0; i < SA.map->num_partitions; i++)
5733 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
5735 if (TREE_CODE (var) != VAR_DECL
5736 && !SA.partition_to_pseudo[i])
5737 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
5738 gcc_assert (SA.partition_to_pseudo[i]);
5740 /* If this decl was marked as living in multiple places, reset
5741 this now to NULL. */
5742 if (DECL_RTL_IF_SET (var) == pc_rtx)
5743 SET_DECL_RTL (var, NULL);
5745 /* Some RTL parts really want to look at DECL_RTL(x) when x
5746 was a decl marked in REG_ATTR or MEM_ATTR. We could use
5747 SET_DECL_RTL here making this available, but that would mean
5748 selecting one of the potentially many RTLs for one DECL.  Instead
5749 of doing that we simply reset the MEM_EXPR of the RTL in question,
5750 then nobody can get at it and hence nobody can call DECL_RTL on it. */
5751 if (!DECL_RTL_SET_P (var))
5753 if (MEM_P (SA.partition_to_pseudo[i]))
5754 set_mem_expr (SA.partition_to_pseudo[i], NULL);
5758 /* If we have a class containing differently aligned pointers
5759 we need to merge those into the corresponding RTL pointer
5760 alignment. */
5761 for (i = 1; i < num_ssa_names; i++)
5763 tree name = ssa_name (i);
5764 int part;
5765 rtx r;
5767 if (!name
5768 /* We might have generated new SSA names in
5769 update_alias_info_with_stack_vars.  They will have NULL
5770 defining statements, and won't be part of the partitioning,
5771 so ignore those. */
5772 || !SSA_NAME_DEF_STMT (name))
5773 continue;
5774 part = var_to_partition (SA.map, name);
5775 if (part == NO_PARTITION)
5776 continue;
5778 /* Adjust all partition members to get the underlying decl of
5779 the representative which we might have created in expand_one_var. */
5780 if (SSA_NAME_VAR (name) == NULL_TREE)
5782 tree leader = partition_to_var (SA.map, part);
5783 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
5784 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
5786 if (!POINTER_TYPE_P (TREE_TYPE (name)))
5787 continue;
5789 r = SA.partition_to_pseudo[part];
5790 if (REG_P (r))
5791 mark_reg_pointer (r, get_pointer_alignment (name));
5794 /* If this function is `main', emit a call to `__main'
5795 to run global initializers, etc. */
5796 if (DECL_NAME (current_function_decl)
5797 && MAIN_NAME_P (DECL_NAME (current_function_decl))
5798 && DECL_FILE_SCOPE_P (current_function_decl))
5799 expand_main_function ();
5801 /* Initialize the stack_protect_guard field. This must happen after the
5802 call to __main (if any) so that the external decl is initialized. */
5803 if (crtl->stack_protect_guard)
5804 stack_protect_prologue ();
5806 expand_phi_nodes (&SA);
5808 /* Register rtl specific functions for cfg. */
5809 rtl_register_cfg_hooks ();
5811 init_block = construct_init_block ();
5813 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
5814 remaining edges later. */
5815 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
5816 e->flags &= ~EDGE_EXECUTABLE;
5818 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
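/* Each block is expanded in turn below; expand_gimple_basic_block may
   split BB (for example when expanding a conditional jump or a tail call
   specially) and returns the last block it created, so the loop variable
   is advanced past any newly created blocks.  */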
5819 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
5820 next_bb)
5821 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
5823 if (MAY_HAVE_DEBUG_INSNS)
5824 expand_debug_locations ();
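/* The debug insns emitted during expansion still refer to trees; this
   rewrites them into RTL debug expressions, roughly via expand_debug_expr,
   now that every variable and SSA partition has its final RTL.  */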
5826 /* Free stuff we no longer need after GIMPLE optimizations. */
5827 free_dominance_info (CDI_DOMINATORS);
5828 free_dominance_info (CDI_POST_DOMINATORS);
5829 delete_tree_cfg_annotations ();
5831 timevar_push (TV_OUT_OF_SSA);
5832 finish_out_of_ssa (&SA);
5833 timevar_pop (TV_OUT_OF_SSA);
5835 timevar_push (TV_POST_EXPAND);
5836 /* We are no longer in SSA form. */
5837 fun->gimple_df->in_ssa_p = false;
5838 loops_state_clear (LOOP_CLOSED_SSA);
5840 /* Expansion is used by optimization passes too, so set maybe_hot_insn_p
5841 conservatively to true until they are all profile aware. */
5842 delete lab_rtx_for_bb;
5843 free_histograms ();
5845 construct_exit_block ();
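/* construct_exit_block wraps the insns emitted after the last GIMPLE block
   (essentially the return sequence around return_label) into a basic block
   of its own and wires it up to the exit block.  */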
5846 insn_locations_finalize ();
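/* var_ret_seq is the sequence that expand_used_vars deferred to the end of
   the function (for instance the code that un-poisons the ASan stack
   redzones); placing it right after return_label, past the basic block
   note, makes it run on the common return path.  */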
5848 if (var_ret_seq)
5850 rtx_insn *after = return_label;
5851 rtx_insn *next = NEXT_INSN (after);
5852 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
5853 after = next;
5854 emit_insn_after (var_ret_seq, after);
5857 /* Zap the tree EH table. */
5858 set_eh_throw_stmt_table (fun, NULL);
5860 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
5861 to split edges, which edge insertions might do. */
5862 rebuild_jump_labels (get_insns ());
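/* Insn sequences queued on edges (mostly the copies generated when the PHI
   nodes were resolved in expand_phi_nodes) still have to be placed into the
   insn stream. Sequences on the lone entry edge go in with the parameter
   setup code; everything else is committed with commit_one_edge_insertion,
   which may split the edge and create a new block.  */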
5864 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
5865 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
5867 edge e;
5868 edge_iterator ei;
5869 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5871 if (e->insns.r)
5873 rebuild_jump_labels_chain (e->insns.r);
5874 /* Put insns after parm birth, but before
5875 NOTE_INSN_FUNCTION_BEG. */
5876 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
5877 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
5879 rtx_insn *insns = e->insns.r;
5880 e->insns.r = NULL;
5881 if (NOTE_P (parm_birth_insn)
5882 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
5883 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
5884 else
5885 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
5887 else
5888 commit_one_edge_insertion (e);
5890 else
5891 ei_next (&ei);
5895 /* We're done expanding trees to RTL. */
5896 currently_expanding_to_rtl = 0;
5898 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
5899 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
5901 edge e;
5902 edge_iterator ei;
5903 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
5905 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
5906 e->flags &= ~EDGE_EXECUTABLE;
5908 /* At the moment not all abnormal edges match the RTL
5909 representation. It is safe to remove them here as
5910 find_many_sub_basic_blocks will rediscover them.
5911 In the future we should get this fixed properly. */
5912 if ((e->flags & EDGE_ABNORMAL)
5913 && !(e->flags & EDGE_SIBCALL))
5914 remove_edge (e);
5915 else
5916 ei_next (&ei);
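/* Expansion can emit several jumps and labels inside what used to be a
   single GIMPLE block, so every block is rescanned below and split into
   proper RTL basic blocks; purge_all_dead_edges then removes CFG edges
   that no longer correspond to a jump in the insn stream.  */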
5920 blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
5921 bitmap_ones (blocks);
5922 find_many_sub_basic_blocks (blocks);
5923 sbitmap_free (blocks);
5924 purge_all_dead_edges ();
5926 expand_stack_alignment ();
5928 /* Fix up REG_EQUIV notes in the prologue if there are tail calls in this
5929 function. */
5930 if (crtl->tail_call_emit)
5931 fixup_tail_calls ();
5933 /* After initial rtl generation, call back to finish generating
5934 exception support code. We need to do this before cleaning up
5935 the CFG as the code does not expect dead landing pads. */
5936 if (fun->eh->region_tree != NULL)
5937 finish_eh_generation ();
5939 /* Remove unreachable blocks; otherwise we cannot compute dominators,
5940 which are needed for loop state verification. As a side-effect
5941 this also compacts blocks.
5942 ??? We cannot remove trivially dead insns here as, for example,
5943 the DRAP reg on i?86 is not magically live at this point.
5944 Execution of gcc.c-torture/execute/ipa-sra-2.c at -Os -m32 fails otherwise. */
5945 cleanup_cfg (CLEANUP_NO_INSN_DEL);
5947 #ifdef ENABLE_CHECKING
5948 verify_flow_info ();
5949 #endif
5951 /* Initialize pseudos allocated for hard registers. */
5952 emit_initial_value_sets ();
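/* emit_initial_value_sets emits, at the start of the function, the copies
   from hard registers into the pseudos that were registered via
   get_hard_reg_initial_val (used e.g. to get at the incoming return
   address on some targets).  */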
5954 /* And finally unshare all RTL. */
5955 unshare_all_rtl ();
5957 /* There's no need to defer outputting this function any more; we
5958 know we want to output it. */
5959 DECL_DEFER_OUTPUT (current_function_decl) = 0;
5961 /* Now that we're done expanding trees to RTL, we shouldn't have any
5962 more CONCATs anywhere. */
5963 generating_concat_p = 0;
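/* While expanding, a complex-mode pseudo may really be a CONCAT of two
   scalar pseudos (one per part); clearing generating_concat_p stops
   gen_reg_rtx from creating such CONCATs, as later RTL passes expect
   ordinary registers.  */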
5965 if (dump_file)
5967 fprintf (dump_file,
5968 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
5969 /* And the pass manager will dump RTL for us. */
5972 /* If we're emitting a nested function, make sure its parent gets
5973 emitted as well. Doing otherwise confuses debug info. */
5975 tree parent;
5976 for (parent = DECL_CONTEXT (current_function_decl);
5977 parent != NULL_TREE;
5978 parent = get_containing_scope (parent))
5979 if (TREE_CODE (parent) == FUNCTION_DECL)
5980 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
5983 /* We are now committed to emitting code for this function. Do any
5984 preparation, such as emitting abstract debug info for the inline function
5985 before it gets mangled by optimization. */
5986 if (cgraph_function_possibly_inlined_p (current_function_decl))
5987 (*debug_hooks->outlining_inline_function) (current_function_decl);
5989 TREE_ASM_WRITTEN (current_function_decl) = 1;
5991 /* After expanding, the return labels are no longer needed. */
5992 return_label = NULL;
5993 naked_return_label = NULL;
5995 /* After expanding, the tm_restart map is no longer needed. */
5996 if (fun->gimple_df->tm_restart)
5998 htab_delete (fun->gimple_df->tm_restart);
5999 fun->gimple_df->tm_restart = NULL;
6002 /* Tag the blocks with a depth number so that change_scope can find
6003 the common parent easily. */
6004 set_block_levels (DECL_INITIAL (fun->decl), 0);
6005 default_rtl_profile ();
6007 timevar_pop (TV_POST_EXPAND);
6009 return 0;
6012 } // anon namespace
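/* Factory used by the pass manager to instantiate the expand pass; the
   pass_expand class itself lives in the anonymous namespace above.  */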
6014 rtl_opt_pass *
6015 make_pass_expand (gcc::context *ctxt)
6017 return new pass_expand (ctxt);