[official-gcc.git] / gcc / cfgexpand.c
1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "rtl.h"
25 #include "hard-reg-set.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "varasm.h"
29 #include "stor-layout.h"
30 #include "stmt.h"
31 #include "print-tree.h"
32 #include "tm_p.h"
33 #include "predict.h"
34 #include "vec.h"
35 #include "hashtab.h"
36 #include "hash-set.h"
37 #include "machmode.h"
38 #include "input.h"
39 #include "function.h"
40 #include "dominance.h"
41 #include "cfg.h"
42 #include "cfgrtl.h"
43 #include "cfganal.h"
44 #include "cfgbuild.h"
45 #include "cfgcleanup.h"
46 #include "basic-block.h"
47 #include "insn-codes.h"
48 #include "optabs.h"
49 #include "expr.h"
50 #include "langhooks.h"
51 #include "bitmap.h"
52 #include "tree-ssa-alias.h"
53 #include "internal-fn.h"
54 #include "tree-eh.h"
55 #include "gimple-expr.h"
56 #include "is-a.h"
57 #include "gimple.h"
58 #include "gimple-iterator.h"
59 #include "gimple-walk.h"
60 #include "gimple-ssa.h"
61 #include "hash-map.h"
62 #include "plugin-api.h"
63 #include "ipa-ref.h"
64 #include "cgraph.h"
65 #include "tree-cfg.h"
66 #include "tree-phinodes.h"
67 #include "ssa-iterators.h"
68 #include "tree-ssanames.h"
69 #include "tree-dfa.h"
70 #include "tree-ssa.h"
71 #include "tree-pass.h"
72 #include "except.h"
73 #include "flags.h"
74 #include "diagnostic.h"
75 #include "gimple-pretty-print.h"
76 #include "toplev.h"
77 #include "debug.h"
78 #include "params.h"
79 #include "tree-inline.h"
80 #include "value-prof.h"
81 #include "target.h"
82 #include "tree-ssa-live.h"
83 #include "tree-outof-ssa.h"
84 #include "sbitmap.h"
85 #include "cfgloop.h"
86 #include "regs.h" /* For reg_renumber. */
87 #include "insn-attr.h" /* For INSN_SCHEDULING. */
88 #include "asan.h"
89 #include "tree-ssa-address.h"
90 #include "recog.h"
91 #include "output.h"
92 #include "builtins.h"
94 /* Some systems use __main in a way incompatible with its use in GCC; in these
95 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
96 give the same symbol without quotes for an alternative entry point. You
97 must define both, or neither. */
98 #ifndef NAME__MAIN
99 #define NAME__MAIN "__main"
100 #endif
102 /* This variable holds information helping the rewriting of SSA trees
103 into RTL. */
104 struct ssaexpand SA;
106 /* This variable holds the currently expanded gimple statement for purposes
107 of communicating the profile info to the builtin expanders.
108 gimple currently_expanding_gimple_stmt;
110 static rtx expand_debug_expr (tree);
112 /* Return an expression tree corresponding to the RHS of GIMPLE
113 statement STMT. */
115 tree
116 gimple_assign_rhs_to_tree (gimple stmt)
118 tree t;
119 enum gimple_rhs_class grhs_class;
121 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
123 if (grhs_class == GIMPLE_TERNARY_RHS)
124 t = build3 (gimple_assign_rhs_code (stmt),
125 TREE_TYPE (gimple_assign_lhs (stmt)),
126 gimple_assign_rhs1 (stmt),
127 gimple_assign_rhs2 (stmt),
128 gimple_assign_rhs3 (stmt));
129 else if (grhs_class == GIMPLE_BINARY_RHS)
130 t = build2 (gimple_assign_rhs_code (stmt),
131 TREE_TYPE (gimple_assign_lhs (stmt)),
132 gimple_assign_rhs1 (stmt),
133 gimple_assign_rhs2 (stmt));
134 else if (grhs_class == GIMPLE_UNARY_RHS)
135 t = build1 (gimple_assign_rhs_code (stmt),
136 TREE_TYPE (gimple_assign_lhs (stmt)),
137 gimple_assign_rhs1 (stmt));
138 else if (grhs_class == GIMPLE_SINGLE_RHS)
140 t = gimple_assign_rhs1 (stmt);
141 /* Avoid modifying this tree in place below. */
142 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
143 && gimple_location (stmt) != EXPR_LOCATION (t))
144 || (gimple_block (stmt)
145 && currently_expanding_to_rtl
146 && EXPR_P (t)))
147 t = copy_node (t);
149 else
150 gcc_unreachable ();
152 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
153 SET_EXPR_LOCATION (t, gimple_location (stmt));
155 return t;
159 #ifndef STACK_ALIGNMENT_NEEDED
160 #define STACK_ALIGNMENT_NEEDED 1
161 #endif
163 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
165 /* Associate declaration T with storage space X. If T is not an
166 SSA name this is exactly SET_DECL_RTL, otherwise make the
167 partition of T associated with X. */
168 static inline void
169 set_rtl (tree t, rtx x)
171 if (TREE_CODE (t) == SSA_NAME)
173 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
174 if (x && !MEM_P (x))
175 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
176 /* For the benefit of debug information at -O0 (where vartracking
177 doesn't run) record the place also in the base DECL if it's
178 a normal variable (not a parameter). */
179 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
181 tree var = SSA_NAME_VAR (t);
182 /* If we don't yet have something recorded, just record it now. */
183 if (!DECL_RTL_SET_P (var))
184 SET_DECL_RTL (var, x);
185 /* If we have it set already to "multiple places" don't
186 change this. */
187 else if (DECL_RTL (var) == pc_rtx)
189 /* If we have something recorded and it's not the same place
190 as we want to record now, we have multiple partitions for the
191 same base variable, with different places. We can't just
192 randomly choose one, hence we have to say that we don't know.
193 This only happens with optimization, and there var-tracking
194 will figure out the right thing. */
195 else if (DECL_RTL (var) != x)
196 SET_DECL_RTL (var, pc_rtx);
199 else
200 SET_DECL_RTL (t, x);
203 /* This structure holds data relevant to one variable that will be
204 placed in a stack slot. */
205 struct stack_var
207 /* The variable. */
208 tree decl;
210 /* Initially, the size of the variable. Later, the size of the partition,
211 if this variable becomes its partition's representative. */
212 HOST_WIDE_INT size;
214 /* The *byte* alignment required for this variable. Or, as with the
215 size, the alignment for this partition. */
216 unsigned int alignb;
218 /* The partition representative. */
219 size_t representative;
221 /* The next stack variable in the partition, or EOC. */
222 size_t next;
224 /* The numbers of conflicting stack variables. */
225 bitmap conflicts;
228 #define EOC ((size_t)-1)
230 /* We have an array of such objects while deciding allocation. */
231 static struct stack_var *stack_vars;
232 static size_t stack_vars_alloc;
233 static size_t stack_vars_num;
234 static hash_map<tree, size_t> *decl_to_stack_part;
236 /* Conflict bitmaps go on this obstack. This allows us to destroy
237 all of them in one big sweep. */
238 static bitmap_obstack stack_var_bitmap_obstack;
240 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
241 is non-decreasing. */
242 static size_t *stack_vars_sorted;
244 /* The phase of the stack frame. This is the known misalignment of
245 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
246 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
247 static int frame_phase;
249 /* Used during expand_used_vars to remember if we saw any decls for
250 which we'd like to enable stack smashing protection. */
251 static bool has_protected_decls;
253 /* Used during expand_used_vars. Remember if we saw a character buffer
254 smaller than our cutoff threshold. Used for -Wstack-protector. */
255 static bool has_short_buffer;
257 /* Compute the byte alignment to use for DECL. Ignore alignment
258 we can't honor given the expected alignment of the stack boundary. */
260 static unsigned int
261 align_local_variable (tree decl)
263 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
264 DECL_ALIGN (decl) = align;
265 return align / BITS_PER_UNIT;
268 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
269 Return the frame offset. */
271 static HOST_WIDE_INT
272 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
274 HOST_WIDE_INT offset, new_frame_offset;
276 new_frame_offset = frame_offset;
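/* In both branches below ALIGN is a power of two, so -align is a mask with
   all bits at and above the alignment bit set; "new_frame_offset &= -align"
   rounds toward negative infinity, which means down for the downward-growing
   case and, because align - 1 was added first, up for the upward-growing
   case.  For example, with align == 8, -24 & -8 == -24 and -27 & -8 == -32.  */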
277 if (FRAME_GROWS_DOWNWARD)
279 new_frame_offset -= size + frame_phase;
280 new_frame_offset &= -align;
281 new_frame_offset += frame_phase;
282 offset = new_frame_offset;
284 else
286 new_frame_offset -= frame_phase;
287 new_frame_offset += align - 1;
288 new_frame_offset &= -align;
289 new_frame_offset += frame_phase;
290 offset = new_frame_offset;
291 new_frame_offset += size;
293 frame_offset = new_frame_offset;
295 if (frame_offset_overflow (frame_offset, cfun->decl))
296 frame_offset = offset = 0;
298 return offset;
301 /* Accumulate DECL into STACK_VARS. */
303 static void
304 add_stack_var (tree decl)
306 struct stack_var *v;
308 if (stack_vars_num >= stack_vars_alloc)
310 if (stack_vars_alloc)
311 stack_vars_alloc = stack_vars_alloc * 3 / 2;
312 else
313 stack_vars_alloc = 32;
314 stack_vars
315 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
317 if (!decl_to_stack_part)
318 decl_to_stack_part = new hash_map<tree, size_t>;
320 v = &stack_vars[stack_vars_num];
321 decl_to_stack_part->put (decl, stack_vars_num);
323 v->decl = decl;
324 v->size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (decl)));
325 /* Ensure that all variables have size, so that &a != &b for any two
326 variables that are simultaneously live. */
327 if (v->size == 0)
328 v->size = 1;
329 v->alignb = align_local_variable (SSAVAR (decl));
330 /* An alignment of zero can mightily confuse us later. */
331 gcc_assert (v->alignb != 0);
333 /* All variables are initially in their own partition. */
334 v->representative = stack_vars_num;
335 v->next = EOC;
337 /* All variables initially conflict with no other. */
338 v->conflicts = NULL;
340 /* Ensure that this decl doesn't get put onto the list twice. */
341 set_rtl (decl, pc_rtx);
343 stack_vars_num++;
346 /* Make the decls associated with luid's X and Y conflict. */
348 static void
349 add_stack_var_conflict (size_t x, size_t y)
351 struct stack_var *a = &stack_vars[x];
352 struct stack_var *b = &stack_vars[y];
353 if (!a->conflicts)
354 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
355 if (!b->conflicts)
356 b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
357 bitmap_set_bit (a->conflicts, y);
358 bitmap_set_bit (b->conflicts, x);
361 /* Check whether the decls associated with luid's X and Y conflict. */
363 static bool
364 stack_var_conflict_p (size_t x, size_t y)
366 struct stack_var *a = &stack_vars[x];
367 struct stack_var *b = &stack_vars[y];
368 if (x == y)
369 return false;
370 /* Partitions containing an SSA name result from gimple registers
371 with things like unsupported modes. They are top-level and
372 hence conflict with everything else. */
373 if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
374 return true;
376 if (!a->conflicts || !b->conflicts)
377 return false;
378 return bitmap_bit_p (a->conflicts, y);
381 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
382 enter its partition number into bitmap DATA. */
384 static bool
385 visit_op (gimple, tree op, tree, void *data)
387 bitmap active = (bitmap)data;
388 op = get_base_address (op);
389 if (op
390 && DECL_P (op)
391 && DECL_RTL_IF_SET (op) == pc_rtx)
393 size_t *v = decl_to_stack_part->get (op);
394 if (v)
395 bitmap_set_bit (active, *v);
397 return false;
400 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
401 record conflicts between it and all currently active other partitions
402 from bitmap DATA. */
404 static bool
405 visit_conflict (gimple, tree op, tree, void *data)
407 bitmap active = (bitmap)data;
408 op = get_base_address (op);
409 if (op
410 && DECL_P (op)
411 && DECL_RTL_IF_SET (op) == pc_rtx)
413 size_t *v = decl_to_stack_part->get (op);
414 if (v && bitmap_set_bit (active, *v))
416 size_t num = *v;
417 bitmap_iterator bi;
418 unsigned i;
419 gcc_assert (num < stack_vars_num);
420 EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
421 add_stack_var_conflict (num, i);
424 return false;
427 /* Helper routine for add_scope_conflicts, calculating the active partitions
428 at the end of BB, leaving the result in WORK. We're called to generate
429 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
430 liveness. */
432 static void
433 add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
435 edge e;
436 edge_iterator ei;
437 gimple_stmt_iterator gsi;
438 walk_stmt_load_store_addr_fn visit;
440 bitmap_clear (work);
441 FOR_EACH_EDGE (e, ei, bb->preds)
442 bitmap_ior_into (work, (bitmap)e->src->aux);
444 visit = visit_op;
446 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
448 gimple stmt = gsi_stmt (gsi);
449 walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
451 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
453 gimple stmt = gsi_stmt (gsi);
455 if (gimple_clobber_p (stmt))
457 tree lhs = gimple_assign_lhs (stmt);
458 size_t *v;
459 /* Nested function lowering might introduce LHSs
460 that are COMPONENT_REFs. */
461 if (TREE_CODE (lhs) != VAR_DECL)
462 continue;
463 if (DECL_RTL_IF_SET (lhs) == pc_rtx
464 && (v = decl_to_stack_part->get (lhs)))
465 bitmap_clear_bit (work, *v);
467 else if (!is_gimple_debug (stmt))
469 if (for_conflict
470 && visit == visit_op)
472 /* If this is the first real instruction in this BB we need
473 to add conflicts for everything live at this point now.
474 Unlike classical liveness for named objects we can't
475 rely on seeing a def/use of the names we're interested in.
476 There might merely be indirect loads/stores. We'd not add any
477 conflicts for such partitions. */
478 bitmap_iterator bi;
479 unsigned i;
480 EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
482 struct stack_var *a = &stack_vars[i];
483 if (!a->conflicts)
484 a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
485 bitmap_ior_into (a->conflicts, work);
487 visit = visit_conflict;
489 walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
494 /* Generate stack partition conflicts between all partitions that are
495 simultaneously live. */
497 static void
498 add_scope_conflicts (void)
500 basic_block bb;
501 bool changed;
502 bitmap work = BITMAP_ALLOC (NULL);
503 int *rpo;
504 int n_bbs;
506 /* We approximate the live range of a stack variable by taking the first
507 mention of its name as starting point(s), and by the end-of-scope
508 death clobber added by gimplify as ending point(s) of the range.
509 This overapproximates in the case where we, for instance, moved an address-taken
510 operation upward without also moving a dereference to it upwards.
511 But it's conservatively correct, as a variable can never hold values
512 before its name is mentioned at least once.
514 We then do a mostly classical bitmap liveness algorithm. */
516 FOR_ALL_BB_FN (bb, cfun)
517 bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);
519 rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
520 n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
522 changed = true;
523 while (changed)
525 int i;
526 changed = false;
527 for (i = 0; i < n_bbs; i++)
529 bitmap active;
530 bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
531 active = (bitmap)bb->aux;
532 add_scope_conflicts_1 (bb, work, false);
533 if (bitmap_ior_into (active, work))
534 changed = true;
538 FOR_EACH_BB_FN (bb, cfun)
539 add_scope_conflicts_1 (bb, work, true);
541 free (rpo);
542 BITMAP_FREE (work);
543 FOR_ALL_BB_FN (bb, cfun)
544 BITMAP_FREE (bb->aux);
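/* As an illustrative sketch (not part of this file): given two
   non-overlapping scopes such as

       { char a[64]; use (a); }
       { char b[64]; use (b); }

   the gimplifier emits a clobber for 'a' at its closing brace, and 'b' is
   not mentioned before that point, so the liveness computed above never
   records a conflict between the two partitions and partition_stack_vars
   is free to give 'a' and 'b' the same frame slot.  */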
547 /* A subroutine of partition_stack_vars. A comparison function for qsort,
548 sorting an array of indices by the properties of the object. */
550 static int
551 stack_var_cmp (const void *a, const void *b)
553 size_t ia = *(const size_t *)a;
554 size_t ib = *(const size_t *)b;
555 unsigned int aligna = stack_vars[ia].alignb;
556 unsigned int alignb = stack_vars[ib].alignb;
557 HOST_WIDE_INT sizea = stack_vars[ia].size;
558 HOST_WIDE_INT sizeb = stack_vars[ib].size;
559 tree decla = stack_vars[ia].decl;
560 tree declb = stack_vars[ib].decl;
561 bool largea, largeb;
562 unsigned int uida, uidb;
564 /* Primary compare on "large" alignment. Large comes first. */
565 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
566 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
567 if (largea != largeb)
568 return (int)largeb - (int)largea;
570 /* Secondary compare on size, decreasing. */
571 if (sizea > sizeb)
572 return -1;
573 if (sizea < sizeb)
574 return 1;
576 /* Tertiary compare on true alignment, decreasing. */
577 if (aligna < alignb)
578 return -1;
579 if (aligna > alignb)
580 return 1;
582 /* Final compare on ID for sort stability, increasing.
583 Two SSA names are compared by their version, SSA names come before
584 non-SSA names, and two normal decls are compared by their DECL_UID. */
585 if (TREE_CODE (decla) == SSA_NAME)
587 if (TREE_CODE (declb) == SSA_NAME)
588 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
589 else
590 return -1;
592 else if (TREE_CODE (declb) == SSA_NAME)
593 return 1;
594 else
595 uida = DECL_UID (decla), uidb = DECL_UID (declb);
596 if (uida < uidb)
597 return 1;
598 if (uida > uidb)
599 return -1;
600 return 0;
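/* Illustration (hypothetical values, not from GCC): if
   MAX_SUPPORTED_STACK_ALIGNMENT is 128 bits, then objects
   { A: 16 bytes, 64-byte alignment; B: 256 bytes, 16-byte alignment;
     C: 4 bytes, 16-byte alignment } sort as A, B, C: A comes first because
   its alignment exceeds the supported maximum ("large"), then B before C
   because sizes compare in decreasing order.  */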
603 struct part_traits : default_hashmap_traits
605 template<typename T>
606 static bool
607 is_deleted (T &e)
608 { return e.m_value == reinterpret_cast<void *> (1); }
610 template<typename T> static bool is_empty (T &e) { return e.m_value == NULL; }
611 template<typename T>
612 static void
613 mark_deleted (T &e)
614 { e.m_value = reinterpret_cast<T> (1); }
616 template<typename T>
617 static void
618 mark_empty (T &e)
619 { e.m_value = NULL; }
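/* The value slot in this map is a bitmap pointer, which for a live entry is
   never NULL and never the address 1, so those two bit patterns can safely
   serve as the "empty" and "deleted" sentinels the hash_map implementation
   requires.  */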
622 typedef hash_map<size_t, bitmap, part_traits> part_hashmap;
624 /* If the points-to solution *PT points to variables that are in a partition
625 together with other variables, add all partition members to the pointed-to
626 variables bitmap. */
628 static void
629 add_partitioned_vars_to_ptset (struct pt_solution *pt,
630 part_hashmap *decls_to_partitions,
631 hash_set<bitmap> *visited, bitmap temp)
633 bitmap_iterator bi;
634 unsigned i;
635 bitmap *part;
637 if (pt->anything
638 || pt->vars == NULL
639 /* The pointed-to vars bitmap is shared, it is enough to
640 visit it once. */
641 || visited->add (pt->vars))
642 return;
644 bitmap_clear (temp);
646 /* By using a temporary bitmap to store all members of the partitions
647 we have to add we make sure to visit each of the partitions only
648 once. */
649 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
650 if ((!temp
651 || !bitmap_bit_p (temp, i))
652 && (part = decls_to_partitions->get (i)))
653 bitmap_ior_into (temp, *part);
654 if (!bitmap_empty_p (temp))
655 bitmap_ior_into (pt->vars, temp);
658 /* Update points-to sets based on partition info, so we can use them on RTL.
659 The bitmaps representing stack partitions will be saved until expand,
660 where partitioned decls used as bases in memory expressions will be
661 rewritten. */
663 static void
664 update_alias_info_with_stack_vars (void)
666 part_hashmap *decls_to_partitions = NULL;
667 size_t i, j;
668 tree var = NULL_TREE;
670 for (i = 0; i < stack_vars_num; i++)
672 bitmap part = NULL;
673 tree name;
674 struct ptr_info_def *pi;
676 /* Not interested in partitions with a single variable. */
677 if (stack_vars[i].representative != i
678 || stack_vars[i].next == EOC)
679 continue;
681 if (!decls_to_partitions)
683 decls_to_partitions = new part_hashmap;
684 cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
687 /* Create an SSA_NAME that points to the partition for use
688 as base during alias-oracle queries on RTL for bases that
689 have been partitioned. */
690 if (var == NULL_TREE)
691 var = create_tmp_var (ptr_type_node, NULL);
692 name = make_ssa_name (var, NULL);
694 /* Create bitmaps representing partitions. They will be used for
695 points-to sets later, so use GGC alloc. */
696 part = BITMAP_GGC_ALLOC ();
697 for (j = i; j != EOC; j = stack_vars[j].next)
699 tree decl = stack_vars[j].decl;
700 unsigned int uid = DECL_PT_UID (decl);
701 bitmap_set_bit (part, uid);
702 decls_to_partitions->put (uid, part);
703 cfun->gimple_df->decls_to_pointers->put (decl, name);
704 if (TREE_ADDRESSABLE (decl))
705 TREE_ADDRESSABLE (name) = 1;
708 /* Make the SSA name point to all partition members. */
709 pi = get_ptr_info (name);
710 pt_solution_set (&pi->pt, part, false);
713 /* Make all points-to sets that contain one member of a partition
714 contain all members of the partition. */
715 if (decls_to_partitions)
717 unsigned i;
718 hash_set<bitmap> visited;
719 bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);
721 for (i = 1; i < num_ssa_names; i++)
723 tree name = ssa_name (i);
724 struct ptr_info_def *pi;
726 if (name
727 && POINTER_TYPE_P (TREE_TYPE (name))
728 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
729 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
730 &visited, temp);
733 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
734 decls_to_partitions, &visited, temp);
736 delete decls_to_partitions;
737 BITMAP_FREE (temp);
741 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
742 partitioning algorithm. Partitions A and B are known to be non-conflicting.
743 Merge them into a single partition A. */
745 static void
746 union_stack_vars (size_t a, size_t b)
748 struct stack_var *vb = &stack_vars[b];
749 bitmap_iterator bi;
750 unsigned u;
752 gcc_assert (stack_vars[b].next == EOC);
753 /* Add B to A's partition. */
754 stack_vars[b].next = stack_vars[a].next;
755 stack_vars[b].representative = a;
756 stack_vars[a].next = b;
758 /* Update the required alignment of partition A to account for B. */
759 if (stack_vars[a].alignb < stack_vars[b].alignb)
760 stack_vars[a].alignb = stack_vars[b].alignb;
762 /* Update the interference graph and merge the conflicts. */
763 if (vb->conflicts)
765 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
766 add_stack_var_conflict (a, stack_vars[u].representative);
767 BITMAP_FREE (vb->conflicts);
771 /* A subroutine of expand_used_vars. Binpack the variables into
772 partitions constrained by the interference graph. The overall
773 algorithm used is as follows:
775 Sort the objects by size in descending order.
776 For each object A {
777 S = size(A)
778 O = 0
779 loop {
780 Look for the largest non-conflicting object B with size <= S.
781 UNION (A, B)
786 static void
787 partition_stack_vars (void)
789 size_t si, sj, n = stack_vars_num;
791 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
792 for (si = 0; si < n; ++si)
793 stack_vars_sorted[si] = si;
795 if (n == 1)
796 return;
798 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
800 for (si = 0; si < n; ++si)
802 size_t i = stack_vars_sorted[si];
803 unsigned int ialign = stack_vars[i].alignb;
804 HOST_WIDE_INT isize = stack_vars[i].size;
806 /* Ignore objects that aren't partition representatives. If we
807 see a var that is not a partition representative, it must
808 have been merged earlier. */
809 if (stack_vars[i].representative != i)
810 continue;
812 for (sj = si + 1; sj < n; ++sj)
814 size_t j = stack_vars_sorted[sj];
815 unsigned int jalign = stack_vars[j].alignb;
816 HOST_WIDE_INT jsize = stack_vars[j].size;
818 /* Ignore objects that aren't partition representatives. */
819 if (stack_vars[j].representative != j)
820 continue;
822 /* Do not mix objects of "small" (supported) alignment
823 and "large" (unsupported) alignment. */
824 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
825 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
826 break;
828 /* For Address Sanitizer do not mix objects with different
829 sizes, as the shorter vars wouldn't be adequately protected.
830 Don't do that for "large" (unsupported) alignment objects,
831 those aren't protected anyway. */
832 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && isize != jsize
833 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
834 break;
836 /* Ignore conflicting objects. */
837 if (stack_var_conflict_p (i, j))
838 continue;
840 /* UNION the objects, placing J at OFFSET. */
841 union_stack_vars (i, j);
845 update_alias_info_with_stack_vars ();
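/* A small worked example (hypothetical, not from GCC): with sorted
   representatives of sizes 64, 32 and 16 where only the 64- and 32-byte
   objects conflict (are live simultaneously), the outer loop first visits
   the 64-byte object, skips the conflicting 32-byte one and unions the
   16-byte object into it; the 32-byte object then remains a partition of
   its own, so the frame needs 64 + 32 bytes instead of 64 + 32 + 16.  */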
848 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
850 static void
851 dump_stack_var_partition (void)
853 size_t si, i, j, n = stack_vars_num;
855 for (si = 0; si < n; ++si)
857 i = stack_vars_sorted[si];
859 /* Skip variables that aren't partition representatives, for now. */
860 if (stack_vars[i].representative != i)
861 continue;
863 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
864 " align %u\n", (unsigned long) i, stack_vars[i].size,
865 stack_vars[i].alignb);
867 for (j = i; j != EOC; j = stack_vars[j].next)
869 fputc ('\t', dump_file);
870 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
872 fputc ('\n', dump_file);
876 /* Assign rtl to DECL at BASE + OFFSET. */
878 static void
879 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
880 HOST_WIDE_INT offset)
882 unsigned align;
883 rtx x;
885 /* If this fails, we've overflowed the stack frame. Error nicely? */
886 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
888 x = plus_constant (Pmode, base, offset);
889 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
891 if (TREE_CODE (decl) != SSA_NAME)
893 /* Set the alignment we actually gave this decl if it isn't an SSA name.
894 If it is, we generate stack slots only accidentally, so it isn't as
895 important; we'll simply use the alignment that is already set. */
896 if (base == virtual_stack_vars_rtx)
897 offset -= frame_phase;
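/* Below, offset & -offset isolates the lowest set bit of OFFSET, i.e. the
   largest power of two that divides it; e.g. an offset of 24 (or -24)
   yields 8, so a decl placed 24 bytes from the base is known to be 8-byte
   (64-bit) aligned relative to that base.  */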
898 align = offset & -offset;
899 align *= BITS_PER_UNIT;
900 if (align == 0 || align > base_align)
901 align = base_align;
903 /* One would think that we could assert that we're not decreasing
904 alignment here, but (at least) the i386 port does exactly this
905 via the MINIMUM_ALIGNMENT hook. */
907 DECL_ALIGN (decl) = align;
908 DECL_USER_ALIGN (decl) = 0;
911 set_mem_attributes (x, SSAVAR (decl), true);
912 set_rtl (decl, x);
915 struct stack_vars_data
917 /* Vector of offset pairs, always end of some padding followed
918 by start of the padding that needs Address Sanitizer protection.
919 The vector is in reverse order: highest-offset pairs come first. */
920 vec<HOST_WIDE_INT> asan_vec;
922 /* Vector of partition representative decls in between the paddings. */
923 vec<tree> asan_decl_vec;
925 /* Base pseudo register for Address Sanitizer protected automatic vars. */
926 rtx asan_base;
928 /* Alignment needed for the Address Sanitizer protected automatic vars. */
929 unsigned int asan_alignb;
932 /* A subroutine of expand_used_vars. Give each partition representative
933 a unique location within the stack frame. Update each partition member
934 with that location. */
936 static void
937 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
939 size_t si, i, j, n = stack_vars_num;
940 HOST_WIDE_INT large_size = 0, large_alloc = 0;
941 rtx large_base = NULL;
942 unsigned large_align = 0;
943 tree decl;
945 /* Determine if there are any variables requiring "large" alignment.
946 Since these are dynamically allocated, we only process these if
947 no predicate is involved. */
948 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
949 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
951 /* Find the total size of these variables. */
952 for (si = 0; si < n; ++si)
954 unsigned alignb;
956 i = stack_vars_sorted[si];
957 alignb = stack_vars[i].alignb;
959 /* Stop when we get to the first decl with "small" alignment. */
960 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
961 break;
963 /* Skip variables that aren't partition representatives. */
964 if (stack_vars[i].representative != i)
965 continue;
967 /* Skip variables that have already had rtl assigned. See also
968 add_stack_var where we perpetrate this pc_rtx hack. */
969 decl = stack_vars[i].decl;
970 if ((TREE_CODE (decl) == SSA_NAME
971 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
972 : DECL_RTL (decl)) != pc_rtx)
973 continue;
975 large_size += alignb - 1;
976 large_size &= -(HOST_WIDE_INT)alignb;
977 large_size += stack_vars[i].size;
980 /* If there were any, allocate space. */
981 if (large_size > 0)
982 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
983 large_align, true);
986 for (si = 0; si < n; ++si)
988 rtx base;
989 unsigned base_align, alignb;
990 HOST_WIDE_INT offset;
992 i = stack_vars_sorted[si];
994 /* Skip variables that aren't partition representatives, for now. */
995 if (stack_vars[i].representative != i)
996 continue;
998 /* Skip variables that have already had rtl assigned. See also
999 add_stack_var where we perpetrate this pc_rtx hack. */
1000 decl = stack_vars[i].decl;
1001 if ((TREE_CODE (decl) == SSA_NAME
1002 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
1003 : DECL_RTL (decl)) != pc_rtx)
1004 continue;
1006 /* Check the predicate to see whether this variable should be
1007 allocated in this pass. */
1008 if (pred && !pred (i))
1009 continue;
1011 alignb = stack_vars[i].alignb;
1012 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1014 base = virtual_stack_vars_rtx;
1015 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK && pred)
1017 HOST_WIDE_INT prev_offset = frame_offset;
1018 tree repr_decl = NULL_TREE;
1020 offset
1021 = alloc_stack_frame_space (stack_vars[i].size
1022 + ASAN_RED_ZONE_SIZE,
1023 MAX (alignb, ASAN_RED_ZONE_SIZE));
1024 data->asan_vec.safe_push (prev_offset);
1025 data->asan_vec.safe_push (offset + stack_vars[i].size);
1026 /* Find best representative of the partition.
1027 Prefer those with DECL_NAME, and better still those
1028 satisfying the asan_protect_stack_decl predicate. */
1029 for (j = i; j != EOC; j = stack_vars[j].next)
1030 if (asan_protect_stack_decl (stack_vars[j].decl)
1031 && DECL_NAME (stack_vars[j].decl))
1033 repr_decl = stack_vars[j].decl;
1034 break;
1036 else if (repr_decl == NULL_TREE
1037 && DECL_P (stack_vars[j].decl)
1038 && DECL_NAME (stack_vars[j].decl))
1039 repr_decl = stack_vars[j].decl;
1040 if (repr_decl == NULL_TREE)
1041 repr_decl = stack_vars[i].decl;
1042 data->asan_decl_vec.safe_push (repr_decl);
1043 data->asan_alignb = MAX (data->asan_alignb, alignb);
1044 if (data->asan_base == NULL)
1045 data->asan_base = gen_reg_rtx (Pmode);
1046 base = data->asan_base;
1048 if (!STRICT_ALIGNMENT)
1049 base_align = crtl->max_used_stack_slot_alignment;
1050 else
1051 base_align = MAX (crtl->max_used_stack_slot_alignment,
1052 GET_MODE_ALIGNMENT (SImode)
1053 << ASAN_SHADOW_SHIFT);
1055 else
1057 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
1058 base_align = crtl->max_used_stack_slot_alignment;
1061 else
1063 /* Large alignment is only processed in the last pass. */
1064 if (pred)
1065 continue;
1066 gcc_assert (large_base != NULL);
1068 large_alloc += alignb - 1;
1069 large_alloc &= -(HOST_WIDE_INT)alignb;
1070 offset = large_alloc;
1071 large_alloc += stack_vars[i].size;
1073 base = large_base;
1074 base_align = large_align;
1077 /* Create rtl for each variable based on its location within the
1078 partition. */
1079 for (j = i; j != EOC; j = stack_vars[j].next)
1081 expand_one_stack_var_at (stack_vars[j].decl,
1082 base, base_align,
1083 offset);
1087 gcc_assert (large_alloc == large_size);
1090 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1091 static HOST_WIDE_INT
1092 account_stack_vars (void)
1094 size_t si, j, i, n = stack_vars_num;
1095 HOST_WIDE_INT size = 0;
1097 for (si = 0; si < n; ++si)
1099 i = stack_vars_sorted[si];
1101 /* Skip variables that aren't partition representatives, for now. */
1102 if (stack_vars[i].representative != i)
1103 continue;
1105 size += stack_vars[i].size;
1106 for (j = i; j != EOC; j = stack_vars[j].next)
1107 set_rtl (stack_vars[j].decl, NULL);
1109 return size;
1112 /* A subroutine of expand_one_var. Called to immediately assign rtl
1113 to a variable to be allocated in the stack frame. */
1115 static void
1116 expand_one_stack_var (tree var)
1118 HOST_WIDE_INT size, offset;
1119 unsigned byte_align;
1121 size = tree_to_uhwi (DECL_SIZE_UNIT (SSAVAR (var)));
1122 byte_align = align_local_variable (SSAVAR (var));
1124 /* We handle highly aligned variables in expand_stack_vars. */
1125 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1127 offset = alloc_stack_frame_space (size, byte_align);
1129 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
1130 crtl->max_used_stack_slot_alignment, offset);
1133 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1134 that will reside in a hard register. */
1136 static void
1137 expand_one_hard_reg_var (tree var)
1139 rest_of_decl_compilation (var, 0, 0);
1142 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1143 that will reside in a pseudo register. */
1145 static void
1146 expand_one_register_var (tree var)
1148 tree decl = SSAVAR (var);
1149 tree type = TREE_TYPE (decl);
1150 machine_mode reg_mode = promote_decl_mode (decl, NULL);
1151 rtx x = gen_reg_rtx (reg_mode);
1153 set_rtl (var, x);
1155 /* Note if the object is a user variable. */
1156 if (!DECL_ARTIFICIAL (decl))
1157 mark_user_reg (x);
1159 if (POINTER_TYPE_P (type))
1160 mark_reg_pointer (x, get_pointer_alignment (var));
1163 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1164 has some associated error, e.g. its type is error-mark. We just need
1165 to pick something that won't crash the rest of the compiler. */
1167 static void
1168 expand_one_error_var (tree var)
1170 machine_mode mode = DECL_MODE (var);
1171 rtx x;
1173 if (mode == BLKmode)
1174 x = gen_rtx_MEM (BLKmode, const0_rtx);
1175 else if (mode == VOIDmode)
1176 x = const0_rtx;
1177 else
1178 x = gen_reg_rtx (mode);
1180 SET_DECL_RTL (var, x);
1183 /* A subroutine of expand_one_var. VAR is a variable that will be
1184 allocated to the local stack frame. Return true if we wish to
1185 add VAR to STACK_VARS so that it will be coalesced with other
1186 variables. Return false to allocate VAR immediately.
1188 This function is used to reduce the number of variables considered
1189 for coalescing, which reduces the size of the quadratic problem. */
1191 static bool
1192 defer_stack_allocation (tree var, bool toplevel)
1194 /* Whether the variable is small enough for immediate allocation not to be
1195 a problem with regard to the frame size. */
1196 bool smallish
1197 = ((HOST_WIDE_INT) tree_to_uhwi (DECL_SIZE_UNIT (var))
1198 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));
1200 /* If stack protection is enabled, *all* stack variables must be deferred,
1201 so that we can re-order the strings to the top of the frame.
1202 Similarly for Address Sanitizer. */
1203 if (flag_stack_protect || ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK))
1204 return true;
1206 /* We handle "large" alignment via dynamic allocation. We want to handle
1207 this extra complication in only one place, so defer them. */
1208 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
1209 return true;
1211 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1212 might be detached from their block and appear at toplevel when we reach
1213 here. We want to coalesce them with variables from other blocks when
1214 the immediate contribution to the frame size would be noticeable. */
1215 if (toplevel && optimize > 0 && DECL_IGNORED_P (var) && !smallish)
1216 return true;
1218 /* Variables declared in the outermost scope automatically conflict
1219 with every other variable. The only reason to want to defer them
1220 at all is that, after sorting, we can more efficiently pack
1221 small variables in the stack frame. Continue to defer at -O2. */
1222 if (toplevel && optimize < 2)
1223 return false;
1225 /* Without optimization, *most* variables are allocated from the
1226 stack, which makes the quadratic problem large exactly when we
1227 want compilation to proceed as quickly as possible. On the
1228 other hand, we don't want the function's stack frame size to
1229 get completely out of hand. So we avoid adding scalars and
1230 "small" aggregates to the list at all. */
1231 if (optimize == 0 && smallish)
1232 return false;
1234 return true;
1237 /* A subroutine of expand_used_vars. Expand one variable according to
1238 its flavor. Variables to be placed on the stack are not actually
1239 expanded yet, merely recorded.
1240 When REALLY_EXPAND is false, only add stack values to be allocated.
1241 Return the stack usage this variable is supposed to take.
1244 static HOST_WIDE_INT
1245 expand_one_var (tree var, bool toplevel, bool really_expand)
1247 unsigned int align = BITS_PER_UNIT;
1248 tree origvar = var;
1250 var = SSAVAR (var);
1252 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1254 /* Because we don't know if VAR will be in a register or on the stack,
1255 we conservatively assume it will be on the stack even if VAR is
1256 eventually put into a register after the RA pass. For non-automatic
1257 variables, which won't be on stack, we collect alignment of
1258 type and ignore user specified alignment. Similarly for
1259 SSA_NAMEs for which use_register_for_decl returns true. */
1260 if (TREE_STATIC (var)
1261 || DECL_EXTERNAL (var)
1262 || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
1263 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1264 TYPE_MODE (TREE_TYPE (var)),
1265 TYPE_ALIGN (TREE_TYPE (var)));
1266 else if (DECL_HAS_VALUE_EXPR_P (var)
1267 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1268 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1269 or variables which were assigned a stack slot already by
1270 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1271 changed from the offset chosen to it. */
1272 align = crtl->stack_alignment_estimated;
1273 else
1274 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1276 /* If the variable alignment is very large we'll dynamically allocate
1277 it, which means that in-frame portion is just a pointer. */
1278 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1279 align = POINTER_SIZE;
1282 if (SUPPORTS_STACK_ALIGNMENT
1283 && crtl->stack_alignment_estimated < align)
1285 /* stack_alignment_estimated shouldn't change after the stack
1286 realign decision has been made. */
1287 gcc_assert (!crtl->stack_realign_processed);
1288 crtl->stack_alignment_estimated = align;
1291 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1292 So here we only make sure stack_alignment_needed >= align. */
1293 if (crtl->stack_alignment_needed < align)
1294 crtl->stack_alignment_needed = align;
1295 if (crtl->max_used_stack_slot_alignment < align)
1296 crtl->max_used_stack_slot_alignment = align;
1298 if (TREE_CODE (origvar) == SSA_NAME)
1300 gcc_assert (TREE_CODE (var) != VAR_DECL
1301 || (!DECL_EXTERNAL (var)
1302 && !DECL_HAS_VALUE_EXPR_P (var)
1303 && !TREE_STATIC (var)
1304 && TREE_TYPE (var) != error_mark_node
1305 && !DECL_HARD_REGISTER (var)
1306 && really_expand));
1308 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1310 else if (DECL_EXTERNAL (var))
1312 else if (DECL_HAS_VALUE_EXPR_P (var))
1314 else if (TREE_STATIC (var))
1316 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1318 else if (TREE_TYPE (var) == error_mark_node)
1320 if (really_expand)
1321 expand_one_error_var (var);
1323 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1325 if (really_expand)
1327 expand_one_hard_reg_var (var);
1328 if (!DECL_HARD_REGISTER (var))
1329 /* Invalid register specification. */
1330 expand_one_error_var (var);
1333 else if (use_register_for_decl (var))
1335 if (really_expand)
1336 expand_one_register_var (origvar);
1338 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
1340 /* Reject variables which cover more than half of the address-space. */
1341 if (really_expand)
1343 error ("size of variable %q+D is too large", var);
1344 expand_one_error_var (var);
1347 else if (defer_stack_allocation (var, toplevel))
1348 add_stack_var (origvar);
1349 else
1351 if (really_expand)
1352 expand_one_stack_var (origvar);
1353 return tree_to_uhwi (DECL_SIZE_UNIT (var));
1355 return 0;
1358 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1359 expanding variables. Those variables that can be put into registers
1360 are allocated pseudos; those that can't are put on the stack.
1362 TOPLEVEL is true if this is the outermost BLOCK. */
1364 static void
1365 expand_used_vars_for_block (tree block, bool toplevel)
1367 tree t;
1369 /* Expand all variables at this level. */
1370 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1371 if (TREE_USED (t)
1372 && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1373 || !DECL_NONSHAREABLE (t)))
1374 expand_one_var (t, toplevel, true);
1376 /* Expand all variables at containing levels. */
1377 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1378 expand_used_vars_for_block (t, false);
1381 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1382 and clear TREE_USED on all local variables. */
1384 static void
1385 clear_tree_used (tree block)
1387 tree t;
1389 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1390 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1391 if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
1392 || !DECL_NONSHAREABLE (t))
1393 TREE_USED (t) = 0;
1395 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1396 clear_tree_used (t);
1399 enum {
1400 SPCT_FLAG_DEFAULT = 1,
1401 SPCT_FLAG_ALL = 2,
1402 SPCT_FLAG_STRONG = 3
1405 /* Examine TYPE and determine a bit mask of the following features. */
1407 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1408 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1409 #define SPCT_HAS_ARRAY 4
1410 #define SPCT_HAS_AGGREGATE 8
1412 static unsigned int
1413 stack_protect_classify_type (tree type)
1415 unsigned int ret = 0;
1416 tree t;
1418 switch (TREE_CODE (type))
1420 case ARRAY_TYPE:
1421 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1422 if (t == char_type_node
1423 || t == signed_char_type_node
1424 || t == unsigned_char_type_node)
1426 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1427 unsigned HOST_WIDE_INT len;
1429 if (!TYPE_SIZE_UNIT (type)
1430 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
1431 len = max;
1432 else
1433 len = tree_to_uhwi (TYPE_SIZE_UNIT (type));
1435 if (len < max)
1436 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1437 else
1438 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1440 else
1441 ret = SPCT_HAS_ARRAY;
1442 break;
1444 case UNION_TYPE:
1445 case QUAL_UNION_TYPE:
1446 case RECORD_TYPE:
1447 ret = SPCT_HAS_AGGREGATE;
1448 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1449 if (TREE_CODE (t) == FIELD_DECL)
1450 ret |= stack_protect_classify_type (TREE_TYPE (t));
1451 break;
1453 default:
1454 break;
1457 return ret;
1460 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1461 part of the local stack frame. Remember if we ever return nonzero for
1462 any variable in this function. The return value is the phase number in
1463 which the variable should be allocated. */
1465 static int
1466 stack_protect_decl_phase (tree decl)
1468 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1469 int ret = 0;
1471 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1472 has_short_buffer = true;
1474 if (flag_stack_protect == SPCT_FLAG_ALL
1475 || flag_stack_protect == SPCT_FLAG_STRONG)
1477 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1478 && !(bits & SPCT_HAS_AGGREGATE))
1479 ret = 1;
1480 else if (bits & SPCT_HAS_ARRAY)
1481 ret = 2;
1483 else
1484 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1486 if (ret)
1487 has_protected_decls = true;
1489 return ret;
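/* Illustration (hypothetical declarations, default --param ssp-buffer-size=8):
   with -fstack-protector-all or -fstack-protector-strong, "char buf[64]" and
   "char buf[4]" are phase 1 (plain character arrays), "int arr[16]" is
   phase 2 (an array of some other type), and a struct containing a char
   array is also phase 2 because SPCT_HAS_AGGREGATE is set.  With plain
   -fstack-protector, phase 1 is given to any decl whose type contains a
   large character array, and everything else stays in phase 0.  */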
1492 /* Two helper routines that check for phase 1 and phase 2. These are used
1493 as callbacks for expand_stack_vars. */
1495 static bool
1496 stack_protect_decl_phase_1 (size_t i)
1498 return stack_protect_decl_phase (stack_vars[i].decl) == 1;
1501 static bool
1502 stack_protect_decl_phase_2 (size_t i)
1504 return stack_protect_decl_phase (stack_vars[i].decl) == 2;
1507 /* And a helper function that checks for the asan phase (with the stack
1508 protector it is phase 3). This is used as a callback for expand_stack_vars.
1509 Returns true if any of the vars in the partition need to be protected. */
1511 static bool
1512 asan_decl_phase_3 (size_t i)
1514 while (i != EOC)
1516 if (asan_protect_stack_decl (stack_vars[i].decl))
1517 return true;
1518 i = stack_vars[i].next;
1520 return false;
1523 /* Ensure that variables in different stack protection phases conflict
1524 so that they are not merged and share the same stack slot. */
1526 static void
1527 add_stack_protection_conflicts (void)
1529 size_t i, j, n = stack_vars_num;
1530 unsigned char *phase;
1532 phase = XNEWVEC (unsigned char, n);
1533 for (i = 0; i < n; ++i)
1534 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1536 for (i = 0; i < n; ++i)
1538 unsigned char ph_i = phase[i];
1539 for (j = i + 1; j < n; ++j)
1540 if (ph_i != phase[j])
1541 add_stack_var_conflict (i, j);
1544 XDELETEVEC (phase);
1547 /* Create a decl for the guard at the top of the stack frame. */
1549 static void
1550 create_stack_guard (void)
1552 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1553 VAR_DECL, NULL, ptr_type_node);
1554 TREE_THIS_VOLATILE (guard) = 1;
1555 TREE_USED (guard) = 1;
1556 expand_one_stack_var (guard);
1557 crtl->stack_protect_guard = guard;
1560 /* Prepare for expanding variables. */
1561 static void
1562 init_vars_expansion (void)
1564 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1565 bitmap_obstack_initialize (&stack_var_bitmap_obstack);
1567 /* A map from decl to stack partition. */
1568 decl_to_stack_part = new hash_map<tree, size_t>;
1570 /* Initialize local stack smashing state. */
1571 has_protected_decls = false;
1572 has_short_buffer = false;
1575 /* Free up stack variable graph data. */
1576 static void
1577 fini_vars_expansion (void)
1579 bitmap_obstack_release (&stack_var_bitmap_obstack);
1580 if (stack_vars)
1581 XDELETEVEC (stack_vars);
1582 if (stack_vars_sorted)
1583 XDELETEVEC (stack_vars_sorted);
1584 stack_vars = NULL;
1585 stack_vars_sorted = NULL;
1586 stack_vars_alloc = stack_vars_num = 0;
1587 delete decl_to_stack_part;
1588 decl_to_stack_part = NULL;
1591 /* Make a fair guess for the size of the stack frame of the function
1592 in NODE. This doesn't have to be exact, the result is only used in
1593 the inline heuristics. So we don't want to run the full stack var
1594 packing algorithm (which is quadratic in the number of stack vars).
1595 Instead, we calculate the total size of all stack vars. This turns
1596 out to be a pretty fair estimate -- packing of stack vars doesn't
1597 happen very often. */
1599 HOST_WIDE_INT
1600 estimated_stack_frame_size (struct cgraph_node *node)
1602 HOST_WIDE_INT size = 0;
1603 size_t i;
1604 tree var;
1605 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1607 push_cfun (fn);
1609 init_vars_expansion ();
1611 FOR_EACH_LOCAL_DECL (fn, i, var)
1612 if (auto_var_in_fn_p (var, fn->decl))
1613 size += expand_one_var (var, true, false);
1615 if (stack_vars_num > 0)
1617 /* Fake sorting the stack vars for account_stack_vars (). */
1618 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1619 for (i = 0; i < stack_vars_num; ++i)
1620 stack_vars_sorted[i] = i;
1621 size += account_stack_vars ();
1624 fini_vars_expansion ();
1625 pop_cfun ();
1626 return size;
1629 /* Helper routine to check if a record or union contains an array field. */
1631 static int
1632 record_or_union_type_has_array_p (const_tree tree_type)
1634 tree fields = TYPE_FIELDS (tree_type);
1635 tree f;
1637 for (f = fields; f; f = DECL_CHAIN (f))
1638 if (TREE_CODE (f) == FIELD_DECL)
1640 tree field_type = TREE_TYPE (f);
1641 if (RECORD_OR_UNION_TYPE_P (field_type)
1642 && record_or_union_type_has_array_p (field_type))
1643 return 1;
1644 if (TREE_CODE (field_type) == ARRAY_TYPE)
1645 return 1;
1647 return 0;
1650 /* Check if the current function has local referenced variables that
1651 have their addresses taken, contain an array, or are arrays. */
1653 static bool
1654 stack_protect_decl_p ()
1656 unsigned i;
1657 tree var;
1659 FOR_EACH_LOCAL_DECL (cfun, i, var)
1660 if (!is_global_var (var))
1662 tree var_type = TREE_TYPE (var);
1663 if (TREE_CODE (var) == VAR_DECL
1664 && (TREE_CODE (var_type) == ARRAY_TYPE
1665 || TREE_ADDRESSABLE (var)
1666 || (RECORD_OR_UNION_TYPE_P (var_type)
1667 && record_or_union_type_has_array_p (var_type))))
1668 return true;
1670 return false;
1673 /* Check if the current function has calls that use a return slot. */
1675 static bool
1676 stack_protect_return_slot_p ()
1678 basic_block bb;
1680 FOR_ALL_BB_FN (bb, cfun)
1681 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
1682 !gsi_end_p (gsi); gsi_next (&gsi))
1684 gimple stmt = gsi_stmt (gsi);
1685 /* This assumes that calls to internal-only functions never
1686 use a return slot. */
1687 if (is_gimple_call (stmt)
1688 && !gimple_call_internal_p (stmt)
1689 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
1690 gimple_call_fndecl (stmt)))
1691 return true;
1693 return false;
1696 /* Expand all variables used in the function. */
1698 static rtx_insn *
1699 expand_used_vars (void)
1701 tree var, outer_block = DECL_INITIAL (current_function_decl);
1702 vec<tree> maybe_local_decls = vNULL;
1703 rtx_insn *var_end_seq = NULL;
1704 unsigned i;
1705 unsigned len;
1706 bool gen_stack_protect_signal = false;
1708 /* Compute the phase of the stack frame for this function. */
1710 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1711 int off = STARTING_FRAME_OFFSET % align;
1712 frame_phase = off ? align - off : 0;
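/* For example, on a hypothetical target where STARTING_FRAME_OFFSET is 4
   and PREFERRED_STACK_BOUNDARY is 128 bits, align is 16, off is 4 and
   frame_phase becomes 12, giving the documented invariant
   (frame_offset + frame_phase) % 16 == 0 for the initial frame_offset of 4.  */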
1715 /* Set TREE_USED on all variables in the local_decls. */
1716 FOR_EACH_LOCAL_DECL (cfun, i, var)
1717 TREE_USED (var) = 1;
1718 /* Clear TREE_USED on all variables associated with a block scope. */
1719 clear_tree_used (DECL_INITIAL (current_function_decl));
1721 init_vars_expansion ();
1723 hash_map<tree, tree> ssa_name_decls;
1724 for (i = 0; i < SA.map->num_partitions; i++)
1726 tree var = partition_to_var (SA.map, i);
1728 gcc_assert (!virtual_operand_p (var));
1730 /* Assign decls to each SSA name partition, share decls for partitions
1731 we could have coalesced (those with the same type). */
1732 if (SSA_NAME_VAR (var) == NULL_TREE)
1734 tree *slot = &ssa_name_decls.get_or_insert (TREE_TYPE (var));
1735 if (!*slot)
1736 *slot = create_tmp_reg (TREE_TYPE (var), NULL);
1737 replace_ssa_name_symbol (var, *slot);
1740 /* Always allocate space for partitions based on VAR_DECLs. But for
1741 those based on PARM_DECLs or RESULT_DECLs and which matter for the
1742 debug info, there is no need to do so if optimization is disabled
1743 because all the SSA_NAMEs based on these DECLs have been coalesced
1744 into a single partition, which is thus assigned the canonical RTL
1745 location of the DECLs. If in_lto_p, we can't rely on optimize,
1746 a function could be compiled with -O1 -flto first and only the
1747 link performed at -O0. */
1748 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1749 expand_one_var (var, true, true);
1750 else if (DECL_IGNORED_P (SSA_NAME_VAR (var)) || optimize || in_lto_p)
1752 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1753 contain the default def (representing the parm or result itself)
1754 we don't do anything here. But those which don't contain the
1755 default def (representing a temporary based on the parm/result)
1756 we need to allocate space just like for normal VAR_DECLs. */
1757 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1759 expand_one_var (var, true, true);
1760 gcc_assert (SA.partition_to_pseudo[i]);
1765 if (flag_stack_protect == SPCT_FLAG_STRONG)
1766 gen_stack_protect_signal
1767 = stack_protect_decl_p () || stack_protect_return_slot_p ();
1769 /* At this point all variables on the local_decls with TREE_USED
1770 set are not associated with any block scope. Lay them out. */
1772 len = vec_safe_length (cfun->local_decls);
1773 FOR_EACH_LOCAL_DECL (cfun, i, var)
1775 bool expand_now = false;
1777 /* Expanded above already. */
1778 if (is_gimple_reg (var))
1780 TREE_USED (var) = 0;
1781 goto next;
1783 /* We didn't set a block for static or extern because it's hard
1784 to tell the difference between a global variable (re)declared
1785 in a local scope, and one that's really declared there to
1786 begin with. And it doesn't really matter much, since we're
1787 not giving them stack space. Expand them now. */
1788 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1789 expand_now = true;
1791 /* Expand variables not associated with any block now. Those created by
1792 the optimizers could be live anywhere in the function. Those that
1793 could possibly have been scoped originally and detached from their
1794 block will have their allocation deferred so we coalesce them with
1795 others when optimization is enabled. */
1796 else if (TREE_USED (var))
1797 expand_now = true;
1799 /* Finally, mark all variables on the list as used. We'll use
1800 this in a moment when we expand those associated with scopes. */
1801 TREE_USED (var) = 1;
1803 if (expand_now)
1804 expand_one_var (var, true, true);
1806 next:
1807 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1809 rtx rtl = DECL_RTL_IF_SET (var);
1811 /* Keep artificial non-ignored vars in cfun->local_decls
1812 chain until instantiate_decls. */
1813 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1814 add_local_decl (cfun, var);
1815 else if (rtl == NULL_RTX)
1816 /* If rtl isn't set yet, which can happen e.g. with
1817 -fstack-protector, retry before returning from this
1818 function. */
1819 maybe_local_decls.safe_push (var);
1823 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1825 +-----------------+-----------------+
1826 | ...processed... | ...duplicates...|
1827 +-----------------+-----------------+
1829 +-- LEN points here.
1831 We just want the duplicates, as those are the artificial
1832 non-ignored vars that we want to keep until instantiate_decls.
1833 Move them down and truncate the array. */
1834 if (!vec_safe_is_empty (cfun->local_decls))
1835 cfun->local_decls->block_remove (0, len);
1837 /* At this point, all variables within the block tree with TREE_USED
1838 set are actually used by the optimized function. Lay them out. */
1839 expand_used_vars_for_block (outer_block, true);
1841 if (stack_vars_num > 0)
1843 add_scope_conflicts ();
1845 /* If stack protection is enabled, we don't share space between
1846 vulnerable data and non-vulnerable data. */
1847 if (flag_stack_protect)
1848 add_stack_protection_conflicts ();
1850 /* Now that we have collected all stack variables, and have computed a
1851 minimal interference graph, attempt to save some stack space. */
1852 partition_stack_vars ();
1853 if (dump_file)
1854 dump_stack_var_partition ();
1857 switch (flag_stack_protect)
1859 case SPCT_FLAG_ALL:
1860 create_stack_guard ();
1861 break;
1863 case SPCT_FLAG_STRONG:
1864 if (gen_stack_protect_signal
1865 || cfun->calls_alloca || has_protected_decls)
1866 create_stack_guard ();
1867 break;
1869 case SPCT_FLAG_DEFAULT:
1870 if (cfun->calls_alloca || has_protected_decls)
1871 create_stack_guard ();
1872 break;
1874 default:
1878 /* Assign rtl to each variable based on these partitions. */
1879 if (stack_vars_num > 0)
1881 struct stack_vars_data data;
1883 data.asan_vec = vNULL;
1884 data.asan_decl_vec = vNULL;
1885 data.asan_base = NULL_RTX;
1886 data.asan_alignb = 0;
1888 /* Reorder decls to be protected by iterating over the variables
1889 array multiple times, and allocating out of each phase in turn. */
1890 /* ??? We could probably integrate this into the qsort we did
1891 earlier, such that we naturally see these variables first,
1892 and thus naturally allocate things in the right order. */
1893 if (has_protected_decls)
1895 /* Phase 1 contains only character arrays. */
1896 expand_stack_vars (stack_protect_decl_phase_1, &data);
1898 /* Phase 2 contains other kinds of arrays. */
1899 if (flag_stack_protect == 2)
1900 expand_stack_vars (stack_protect_decl_phase_2, &data);
1903 if ((flag_sanitize & SANITIZE_ADDRESS) && ASAN_STACK)
1904 /* Phase 3, any partitions that need asan protection
1905 in addition to phase 1 and 2. */
1906 expand_stack_vars (asan_decl_phase_3, &data);
1908 if (!data.asan_vec.is_empty ())
1910 HOST_WIDE_INT prev_offset = frame_offset;
1911 HOST_WIDE_INT offset, sz, redzonesz;
1912 redzonesz = ASAN_RED_ZONE_SIZE;
1913 sz = data.asan_vec[0] - prev_offset;
1914 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
1915 && data.asan_alignb <= 4096
1916 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
1917 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
1918 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
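	     /* A worked example with illustrative values: if sz == 40,
		ASAN_RED_ZONE_SIZE == 32 and asan_alignb == 64, then
		redzonesz == ((40 + 32 + 63) & ~63) - 40 == 88, so
		sz + redzonesz == 128 and the variable area plus its red
		zone ends on the requested 64-byte boundary.  */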
1919 offset
1920 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
1921 data.asan_vec.safe_push (prev_offset);
1922 data.asan_vec.safe_push (offset);
1923 /* Leave space for alignment if STRICT_ALIGNMENT. */
1924 if (STRICT_ALIGNMENT)
1925 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
1926 << ASAN_SHADOW_SHIFT)
1927 / BITS_PER_UNIT, 1);
1929 var_end_seq
1930 = asan_emit_stack_protection (virtual_stack_vars_rtx,
1931 data.asan_base,
1932 data.asan_alignb,
1933 data.asan_vec.address (),
1934 data.asan_decl_vec.address (),
1935 data.asan_vec.length ());
1938 expand_stack_vars (NULL, &data);
1940 data.asan_vec.release ();
1941 data.asan_decl_vec.release ();
1944 fini_vars_expansion ();
1946 /* If there were any artificial non-ignored vars without rtl
1947 found earlier, see if deferred stack allocation hasn't assigned
1948 rtl to them. */
1949 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
1951 rtx rtl = DECL_RTL_IF_SET (var);
1953 /* Keep artificial non-ignored vars in cfun->local_decls
1954 chain until instantiate_decls. */
1955 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1956 add_local_decl (cfun, var);
1958 maybe_local_decls.release ();
1960 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1961 if (STACK_ALIGNMENT_NEEDED)
1963 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1964 if (!FRAME_GROWS_DOWNWARD)
1965 frame_offset += align - 1;
1966 frame_offset &= -align;
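      /* For example, with align == 16: when the frame grows downward a
	 frame_offset of -20 becomes -20 & -16 == -32, and otherwise a
	 frame_offset of 20 becomes (20 + 15) & -16 == 32, i.e. the offset
	 is rounded to the next 16-byte boundary in the direction the
	 frame grows.  */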
1969 return var_end_seq;
1973 /* If we need to produce a detailed dump, print the tree representation
1974 for STMT to the dump file. SINCE is the last RTX after which the RTL
1975 generated for STMT should have been appended. */
1977 static void
1978 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx_insn *since)
1980 if (dump_file && (dump_flags & TDF_DETAILS))
1982 fprintf (dump_file, "\n;; ");
1983 print_gimple_stmt (dump_file, stmt, 0,
1984 TDF_SLIM | (dump_flags & TDF_LINENO));
1985 fprintf (dump_file, "\n");
1987 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1991 /* Maps the blocks that do not contain tree labels to rtx labels. */
1993 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
1995 /* Returns the label_rtx expression for a label starting basic block BB. */
1997 static rtx
1998 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2000 gimple_stmt_iterator gsi;
2001 tree lab;
2002 gimple lab_stmt;
2004 if (bb->flags & BB_RTL)
2005 return block_label (bb);
2007 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
2008 if (elt)
2009 return *elt;
2011 /* Find the tree label if it is present. */
2013 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2015 lab_stmt = gsi_stmt (gsi);
2016 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
2017 break;
2019 lab = gimple_label_label (lab_stmt);
2020 if (DECL_NONLOCAL (lab))
2021 break;
2023 return label_rtx (lab);
2026 rtx_code_label *l = gen_label_rtx ();
2027 lab_rtx_for_bb->put (bb, l);
2028 return l;
2032 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2033 of a basic block where we just expanded the conditional at the end,
2034 possibly clean up the CFG and instruction sequence. LAST is the
2035 last instruction before the just emitted jump sequence. */
2037 static void
2038 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2040 /* Special case: when jumpif decides that the condition is
2041 trivial it emits an unconditional jump (and the necessary
2042 barrier). But we still have two edges, and the fallthru one is
2043 wrong. purge_dead_edges would clean this up later. Unfortunately
2044 we have to insert insns (and split edges) before
2045 find_many_sub_basic_blocks and hence before purge_dead_edges.
2046 But splitting edges might create new blocks which depend on the
2047 fact that if there are two edges there's no barrier. So the
2048 barrier would get lost and verify_flow_info would ICE. Instead
2049 of auditing all edge splitters to care for the barrier (which
2050 normally isn't there in a cleaned CFG), fix it here. */
2051 if (BARRIER_P (get_last_insn ()))
2053 rtx_insn *insn;
2054 remove_edge (e);
2056 /* Now we have a single successor block; if we have insns to
2057 insert on the remaining edge we will potentially insert
2058 them at the end of this block (if the dest block isn't feasible)
2059 in order to avoid splitting the edge. This insertion will take
2059 place in front of the last jump. But we might have emitted
2060 multiple jumps (conditional and one unconditional) to the
2061 same destination. Inserting in front of the last one then
2062 is a problem. See PR 40021. We fix this by deleting all
2063 jumps except the last unconditional one. */
2064 insn = PREV_INSN (get_last_insn ());
2065 /* Make sure we have an unconditional jump. Otherwise we're
2066 confused. */
2067 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2068 for (insn = PREV_INSN (insn); insn != last;)
2070 insn = PREV_INSN (insn);
2071 if (JUMP_P (NEXT_INSN (insn)))
2073 if (!any_condjump_p (NEXT_INSN (insn)))
2075 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2076 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2078 delete_insn (NEXT_INSN (insn));
2084 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2085 Returns a new basic block if we've terminated the current basic
2086 block and created a new one. */
2088 static basic_block
2089 expand_gimple_cond (basic_block bb, gimple stmt)
2091 basic_block new_bb, dest;
2092 edge new_edge;
2093 edge true_edge;
2094 edge false_edge;
2095 rtx_insn *last2, *last;
2096 enum tree_code code;
2097 tree op0, op1;
2099 code = gimple_cond_code (stmt);
2100 op0 = gimple_cond_lhs (stmt);
2101 op1 = gimple_cond_rhs (stmt);
2102 /* We're sometimes presented with such code:
2103 D.123_1 = x < y;
2104 if (D.123_1 != 0)
2106 This would expand to two comparisons which then later might
2107 be cleaned up by combine. But some pattern matchers like if-conversion
2108 work better when there's only one compare, so compensate for this
2109 here as a special exception if TER would have made the same change.  */
2110 if (SA.values
2111 && TREE_CODE (op0) == SSA_NAME
2112 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2113 && TREE_CODE (op1) == INTEGER_CST
2114 && ((gimple_cond_code (stmt) == NE_EXPR
2115 && integer_zerop (op1))
2116 || (gimple_cond_code (stmt) == EQ_EXPR
2117 && integer_onep (op1)))
2118 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2120 gimple second = SSA_NAME_DEF_STMT (op0);
2121 if (gimple_code (second) == GIMPLE_ASSIGN)
2123 enum tree_code code2 = gimple_assign_rhs_code (second);
2124 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2126 code = code2;
2127 op0 = gimple_assign_rhs1 (second);
2128 op1 = gimple_assign_rhs2 (second);
2130 /* If jumps are cheap turn some more codes into
2131 jumpy sequences. */
2132 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
2134 if ((code2 == BIT_AND_EXPR
2135 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2136 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2137 || code2 == TRUTH_AND_EXPR)
2139 code = TRUTH_ANDIF_EXPR;
2140 op0 = gimple_assign_rhs1 (second);
2141 op1 = gimple_assign_rhs2 (second);
2143 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2145 code = TRUTH_ORIF_EXPR;
2146 op0 = gimple_assign_rhs1 (second);
2147 op1 = gimple_assign_rhs2 (second);
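  /* A sketch of the shortcuts above: for
	 D.123_1 = x < y;  if (D.123_1 != 0) ...
     we emit a single compare-and-branch on x < y instead of materializing
     D.123_1, and when branches are cheap a one-bit
	 D.123_1 = _t1 & _t2
     feeding the condition is treated as TRUTH_ANDIF_EXPR, i.e. two
     conditional jumps rather than a materialized AND.  */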
2153 last2 = last = get_last_insn ();
2155 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2156 set_curr_insn_location (gimple_location (stmt));
2158 /* These flags have no purpose in RTL land. */
2159 true_edge->flags &= ~EDGE_TRUE_VALUE;
2160 false_edge->flags &= ~EDGE_FALSE_VALUE;
2162 /* We can either have a pure conditional jump with one fallthru edge or
2163 two-way jump that needs to be decomposed into two basic blocks. */
2164 if (false_edge->dest == bb->next_bb)
2166 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2167 true_edge->probability);
2168 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2169 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2170 set_curr_insn_location (true_edge->goto_locus);
2171 false_edge->flags |= EDGE_FALLTHRU;
2172 maybe_cleanup_end_of_block (false_edge, last);
2173 return NULL;
2175 if (true_edge->dest == bb->next_bb)
2177 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2178 false_edge->probability);
2179 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2180 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2181 set_curr_insn_location (false_edge->goto_locus);
2182 true_edge->flags |= EDGE_FALLTHRU;
2183 maybe_cleanup_end_of_block (true_edge, last);
2184 return NULL;
2187 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2188 true_edge->probability);
2189 last = get_last_insn ();
2190 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2191 set_curr_insn_location (false_edge->goto_locus);
2192 emit_jump (label_rtx_for_bb (false_edge->dest));
2194 BB_END (bb) = last;
2195 if (BARRIER_P (BB_END (bb)))
2196 BB_END (bb) = PREV_INSN (BB_END (bb));
2197 update_bb_for_insn (bb);
2199 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2200 dest = false_edge->dest;
2201 redirect_edge_succ (false_edge, new_bb);
2202 false_edge->flags |= EDGE_FALLTHRU;
2203 new_bb->count = false_edge->count;
2204 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2205 add_bb_to_loop (new_bb, bb->loop_father);
2206 new_edge = make_edge (new_bb, dest, 0);
2207 new_edge->probability = REG_BR_PROB_BASE;
2208 new_edge->count = new_bb->count;
2209 if (BARRIER_P (BB_END (new_bb)))
2210 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2211 update_bb_for_insn (new_bb);
2213 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2215 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2217 set_curr_insn_location (true_edge->goto_locus);
2218 true_edge->goto_locus = curr_insn_location ();
2221 return new_bb;
2224 /* Mark all calls that can have a transaction restart. */
2226 static void
2227 mark_transaction_restart_calls (gimple stmt)
2229 struct tm_restart_node dummy;
2230 void **slot;
2232 if (!cfun->gimple_df->tm_restart)
2233 return;
2235 dummy.stmt = stmt;
2236 slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
2237 if (slot)
2239 struct tm_restart_node *n = (struct tm_restart_node *) *slot;
2240 tree list = n->label_or_list;
2241 rtx_insn *insn;
2243 for (insn = next_real_insn (get_last_insn ());
2244 !CALL_P (insn);
2245 insn = next_real_insn (insn))
2246 continue;
2248 if (TREE_CODE (list) == LABEL_DECL)
2249 add_reg_note (insn, REG_TM, label_rtx (list));
2250 else
2251 for (; list ; list = TREE_CHAIN (list))
2252 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2256 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2257 statement STMT. */
2259 static void
2260 expand_call_stmt (gimple stmt)
2262 tree exp, decl, lhs;
2263 bool builtin_p;
2264 size_t i;
2266 if (gimple_call_internal_p (stmt))
2268 expand_internal_call (stmt);
2269 return;
2272 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2274 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2275 decl = gimple_call_fndecl (stmt);
2276 builtin_p = decl && DECL_BUILT_IN (decl);
2278 /* If this is not a builtin function, the function type through which the
2279 call is made may be different from the type of the function. */
2280 if (!builtin_p)
2281 CALL_EXPR_FN (exp)
2282 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2283 CALL_EXPR_FN (exp));
2285 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2286 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2288 for (i = 0; i < gimple_call_num_args (stmt); i++)
2290 tree arg = gimple_call_arg (stmt, i);
2291 gimple def;
2292 /* TER substitutes addresses into arguments of builtin functions so we
2293 have a chance to infer more correct alignment information. See PR39954. */
2294 if (builtin_p
2295 && TREE_CODE (arg) == SSA_NAME
2296 && (def = get_gimple_for_ssa_name (arg))
2297 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2298 arg = gimple_assign_rhs1 (def);
2299 CALL_EXPR_ARG (exp, i) = arg;
2302 if (gimple_has_side_effects (stmt))
2303 TREE_SIDE_EFFECTS (exp) = 1;
2305 if (gimple_call_nothrow_p (stmt))
2306 TREE_NOTHROW (exp) = 1;
2308 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2309 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2310 if (decl
2311 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2312 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2313 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2314 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2315 else
2316 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2317 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2318 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2320 /* Ensure RTL is created for debug args. */
2321 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2323 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
2324 unsigned int ix;
2325 tree dtemp;
2327 if (debug_args)
2328 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2330 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2331 expand_debug_expr (dtemp);
2335 lhs = gimple_call_lhs (stmt);
2336 if (lhs)
2337 expand_assignment (lhs, exp, false);
2338 else
2339 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2341 mark_transaction_restart_calls (stmt);
2345 /* Generate RTL for an asm statement (explicit assembler code).
2346 STRING is a STRING_CST node containing the assembler code text,
2347 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2348 insn is volatile; don't optimize it. */
2350 static void
2351 expand_asm_loc (tree string, int vol, location_t locus)
2353 rtx body;
2355 if (TREE_CODE (string) == ADDR_EXPR)
2356 string = TREE_OPERAND (string, 0);
2358 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2359 ggc_strdup (TREE_STRING_POINTER (string)),
2360 locus);
2362 MEM_VOLATILE_P (body) = vol;
2364 emit_insn (body);
2367 /* Return the number of times character C occurs in string S. */
2368 static int
2369 n_occurrences (int c, const char *s)
2371 int n = 0;
2372 while (*s)
2373 n += (*s++ == c);
2374 return n;
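/* For example, n_occurrences (',', "=r,m") == 1; the helper below uses
   this to count the alternatives in a constraint string (one comma,
   hence two alternatives).  */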
2377 /* A subroutine of expand_asm_operands. Check that all operands have
2378 the same number of alternatives. Return true if so. */
2380 static bool
2381 check_operand_nalternatives (tree outputs, tree inputs)
2383 if (outputs || inputs)
2385 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
2386 int nalternatives
2387 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
2388 tree next = inputs;
2390 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
2392 error ("too many alternatives in %<asm%>");
2393 return false;
2396 tmp = outputs;
2397 while (tmp)
2399 const char *constraint
2400 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2402 if (n_occurrences (',', constraint) != nalternatives)
2404 error ("operand constraints for %<asm%> differ "
2405 "in number of alternatives");
2406 return false;
2409 if (TREE_CHAIN (tmp))
2410 tmp = TREE_CHAIN (tmp);
2411 else
2412 tmp = next, next = 0;
2416 return true;
2419 /* Check for overlap between registers marked in CLOBBERED_REGS and
2420 anything inappropriate in T. Emit an error and return true if a
2421 conflict is found, false if T is OK. */
2423 static bool
2424 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2426 /* Conflicts between asm-declared register variables and the clobber
2427 list are not allowed. */
2428 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
2430 if (overlap)
2432 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2433 DECL_NAME (overlap));
2435 /* Reset registerness to stop multiple errors being emitted for a
2436 single variable. */
2437 DECL_REGISTER (overlap) = 0;
2438 return true;
2441 return false;
2444 /* Generate RTL for an asm statement with arguments.
2445 STRING is the instruction template.
2446 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2447 Each output or input has an expression in the TREE_VALUE and
2448 a tree list in TREE_PURPOSE which in turn contains a constraint
2449 name in TREE_VALUE (or NULL_TREE) and a constraint string
2450 in TREE_PURPOSE.
2451 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2452 that is clobbered by this insn.
2454 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2455 should be the fallthru basic block of the asm goto.
2457 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2458 Some elements of OUTPUTS may be replaced with trees representing temporary
2459 values. The caller should copy those temporary values to the originally
2460 specified lvalues.
2462 VOL nonzero means the insn is volatile; don't optimize it. */
2464 static void
2465 expand_asm_operands (tree string, tree outputs, tree inputs,
2466 tree clobbers, tree labels, basic_block fallthru_bb,
2467 int vol, location_t locus)
2469 rtvec argvec, constraintvec, labelvec;
2470 rtx body;
2471 int ninputs = list_length (inputs);
2472 int noutputs = list_length (outputs);
2473 int nlabels = list_length (labels);
2474 int ninout;
2475 int nclobbers;
2476 HARD_REG_SET clobbered_regs;
2477 int clobber_conflict_found = 0;
2478 tree tail;
2479 tree t;
2480 int i;
2481 /* Vector of RTX's of evaluated output operands. */
2482 rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
2483 int *inout_opnum = XALLOCAVEC (int, noutputs);
2484 rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
2485 machine_mode *inout_mode = XALLOCAVEC (machine_mode, noutputs);
2486 const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
2487 int old_generating_concat_p = generating_concat_p;
2488 rtx_code_label *fallthru_label = NULL;
2490 /* An ASM with no outputs needs to be treated as volatile, for now. */
2491 if (noutputs == 0)
2492 vol = 1;
2494 if (! check_operand_nalternatives (outputs, inputs))
2495 return;
2497 string = resolve_asm_operand_names (string, outputs, inputs, labels);
2499 /* Collect constraints. */
2500 i = 0;
2501 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
2502 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2503 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
2504 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2506 /* Sometimes we wish to automatically clobber registers across an asm.
2507 Case in point is when the i386 backend moved from cc0 to a hard reg --
2508 maintaining source-level compatibility means automatically clobbering
2509 the flags register. */
2510 clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
2512 /* Count the number of meaningful clobbered registers, ignoring what
2513 we would ignore later. */
2514 nclobbers = 0;
2515 CLEAR_HARD_REG_SET (clobbered_regs);
2516 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2518 const char *regname;
2519 int nregs;
2521 if (TREE_VALUE (tail) == error_mark_node)
2522 return;
2523 regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2525 i = decode_reg_name_and_count (regname, &nregs);
2526 if (i == -4)
2527 ++nclobbers;
2528 else if (i == -2)
2529 error ("unknown register name %qs in %<asm%>", regname);
2531 /* Mark clobbered registers. */
2532 if (i >= 0)
2534 int reg;
2536 for (reg = i; reg < i + nregs; reg++)
2538 ++nclobbers;
2540 /* Clobbering the PIC register is an error. */
2541 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2543 error ("PIC register clobbered by %qs in %<asm%>", regname);
2544 return;
2547 SET_HARD_REG_BIT (clobbered_regs, reg);
2552 /* First pass over inputs and outputs checks validity and sets
2553 mark_addressable if needed. */
2555 ninout = 0;
2556 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2558 tree val = TREE_VALUE (tail);
2559 tree type = TREE_TYPE (val);
2560 const char *constraint;
2561 bool is_inout;
2562 bool allows_reg;
2563 bool allows_mem;
2565 /* If there's an erroneous arg, emit no insn. */
2566 if (type == error_mark_node)
2567 return;
2569 /* Try to parse the output constraint. If that fails, there's
2570 no point in going further. */
2571 constraint = constraints[i];
2572 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2573 &allows_mem, &allows_reg, &is_inout))
2574 return;
2576 if (! allows_reg
2577 && (allows_mem
2578 || is_inout
2579 || (DECL_P (val)
2580 && REG_P (DECL_RTL (val))
2581 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2582 mark_addressable (val);
2584 if (is_inout)
2585 ninout++;
2588 ninputs += ninout;
2589 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
2591 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
2592 return;
2595 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
2597 bool allows_reg, allows_mem;
2598 const char *constraint;
2600 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
2601 would get VOIDmode and that could cause a crash in reload. */
2602 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
2603 return;
2605 constraint = constraints[i + noutputs];
2606 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2607 constraints, &allows_mem, &allows_reg))
2608 return;
2610 if (! allows_reg && allows_mem)
2611 mark_addressable (TREE_VALUE (tail));
2614 /* Second pass evaluates arguments. */
2616 /* Make sure stack is consistent for asm goto. */
2617 if (nlabels > 0)
2618 do_pending_stack_adjust ();
2620 ninout = 0;
2621 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2623 tree val = TREE_VALUE (tail);
2624 tree type = TREE_TYPE (val);
2625 bool is_inout;
2626 bool allows_reg;
2627 bool allows_mem;
2628 rtx op;
2629 bool ok;
2631 ok = parse_output_constraint (&constraints[i], i, ninputs,
2632 noutputs, &allows_mem, &allows_reg,
2633 &is_inout);
2634 gcc_assert (ok);
2636 /* If an output operand is not a decl or indirect ref and our constraint
2637 allows a register, make a temporary to act as an intermediate.
2638 Make the asm insn write into that, then our caller will copy it to
2639 the real output operand. Likewise for promoted variables. */
2641 generating_concat_p = 0;
2643 real_output_rtx[i] = NULL_RTX;
2644 if ((TREE_CODE (val) == INDIRECT_REF
2645 && allows_mem)
2646 || (DECL_P (val)
2647 && (allows_mem || REG_P (DECL_RTL (val)))
2648 && ! (REG_P (DECL_RTL (val))
2649 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2650 || ! allows_reg
2651 || is_inout)
2653 op = expand_expr (val, NULL_RTX, VOIDmode,
2654 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
2655 if (MEM_P (op))
2656 op = validize_mem (op);
2658 if (! allows_reg && !MEM_P (op))
2659 error ("output number %d not directly addressable", i);
2660 if ((! allows_mem && MEM_P (op))
2661 || GET_CODE (op) == CONCAT)
2663 real_output_rtx[i] = op;
2664 op = gen_reg_rtx (GET_MODE (op));
2665 if (is_inout)
2666 emit_move_insn (op, real_output_rtx[i]);
2669 else
2671 op = assign_temp (type, 0, 1);
2672 op = validize_mem (op);
2673 if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
2674 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
2675 TREE_VALUE (tail) = make_tree (type, op);
2677 output_rtx[i] = op;
2679 generating_concat_p = old_generating_concat_p;
2681 if (is_inout)
2683 inout_mode[ninout] = TYPE_MODE (type);
2684 inout_opnum[ninout++] = i;
2687 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2688 clobber_conflict_found = 1;
2691 /* Make vectors for the expression-rtx, constraint strings,
2692 and named operands. */
2694 argvec = rtvec_alloc (ninputs);
2695 constraintvec = rtvec_alloc (ninputs);
2696 labelvec = rtvec_alloc (nlabels);
2698 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
2699 : GET_MODE (output_rtx[0])),
2700 ggc_strdup (TREE_STRING_POINTER (string)),
2701 empty_string, 0, argvec, constraintvec,
2702 labelvec, locus);
2704 MEM_VOLATILE_P (body) = vol;
2706 /* Eval the inputs and put them into ARGVEC.
2707 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
2709 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
2711 bool allows_reg, allows_mem;
2712 const char *constraint;
2713 tree val, type;
2714 rtx op;
2715 bool ok;
2717 constraint = constraints[i + noutputs];
2718 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
2719 constraints, &allows_mem, &allows_reg);
2720 gcc_assert (ok);
2722 generating_concat_p = 0;
2724 val = TREE_VALUE (tail);
2725 type = TREE_TYPE (val);
2726 /* EXPAND_INITIALIZER will not generate code for valid initializer
2727 constants, but will still generate code for other types of operand.
2728 This is the behavior we want for constant constraints. */
2729 op = expand_expr (val, NULL_RTX, VOIDmode,
2730 allows_reg ? EXPAND_NORMAL
2731 : allows_mem ? EXPAND_MEMORY
2732 : EXPAND_INITIALIZER);
2734 /* Never pass a CONCAT to an ASM. */
2735 if (GET_CODE (op) == CONCAT)
2736 op = force_reg (GET_MODE (op), op);
2737 else if (MEM_P (op))
2738 op = validize_mem (op);
2740 if (asm_operand_ok (op, constraint, NULL) <= 0)
2742 if (allows_reg && TYPE_MODE (type) != BLKmode)
2743 op = force_reg (TYPE_MODE (type), op);
2744 else if (!allows_mem)
2745 warning (0, "asm operand %d probably doesn%'t match constraints",
2746 i + noutputs);
2747 else if (MEM_P (op))
2749 /* We won't recognize either volatile memory or memory
2750 with a queued address as a memory_operand available
2751 at this point. Ignore it: clearly this *is* a memory. */
2753 else
2754 gcc_unreachable ();
2757 generating_concat_p = old_generating_concat_p;
2758 ASM_OPERANDS_INPUT (body, i) = op;
2760 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
2761 = gen_rtx_ASM_INPUT_loc (TYPE_MODE (type),
2762 ggc_strdup (constraints[i + noutputs]),
2763 locus);
2765 if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
2766 clobber_conflict_found = 1;
2769 /* Protect all the operands from the queue now that they have all been
2770 evaluated. */
2772 generating_concat_p = 0;
2774 /* For in-out operands, copy output rtx to input rtx. */
2775 for (i = 0; i < ninout; i++)
2777 int j = inout_opnum[i];
2778 char buffer[16];
2780 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
2781 = output_rtx[j];
2783 sprintf (buffer, "%d", j);
2784 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
2785 = gen_rtx_ASM_INPUT_loc (inout_mode[i], ggc_strdup (buffer), locus);
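      /* For example, a "+r" output at position 2 is represented here as an
	 extra input whose constraint string is simply "2", i.e. a matching
	 constraint tying it back to output_rtx[2].  */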
2788 /* Copy labels to the vector. */
2789 for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
2791 rtx r;
2792 /* If asm goto has any labels in the fallthru basic block, use
2793 a label that we emit immediately after the asm goto. Expansion
2794 may insert further instructions into the same basic block after
2795 asm goto and if we don't do this, insertion of instructions on
2796 the fallthru edge might misbehave. See PR58670. */
2797 if (fallthru_bb
2798 && label_to_block_fn (cfun, TREE_VALUE (tail)) == fallthru_bb)
2800 if (fallthru_label == NULL_RTX)
2801 fallthru_label = gen_label_rtx ();
2802 r = fallthru_label;
2804 else
2805 r = label_rtx (TREE_VALUE (tail));
2806 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
2809 generating_concat_p = old_generating_concat_p;
2811 /* Now, for each output, construct an rtx
2812 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
2813 ARGVEC CONSTRAINTS OPNAMES))
2814 If there is more than one, put them inside a PARALLEL. */
2816 if (nlabels > 0 && nclobbers == 0)
2818 gcc_assert (noutputs == 0);
2819 emit_jump_insn (body);
2821 else if (noutputs == 0 && nclobbers == 0)
2823 /* No output operands: put in a raw ASM_OPERANDS rtx. */
2824 emit_insn (body);
2826 else if (noutputs == 1 && nclobbers == 0)
2828 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
2829 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
2831 else
2833 rtx obody = body;
2834 int num = noutputs;
2836 if (num == 0)
2837 num = 1;
2839 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
2841 /* For each output operand, store a SET. */
2842 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
2844 XVECEXP (body, 0, i)
2845 = gen_rtx_SET (VOIDmode,
2846 output_rtx[i],
2847 gen_rtx_ASM_OPERANDS
2848 (GET_MODE (output_rtx[i]),
2849 ggc_strdup (TREE_STRING_POINTER (string)),
2850 ggc_strdup (constraints[i]),
2851 i, argvec, constraintvec, labelvec, locus));
2853 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
2856 /* If there are no outputs (but there are some clobbers)
2857 store the bare ASM_OPERANDS into the PARALLEL. */
2859 if (i == 0)
2860 XVECEXP (body, 0, i++) = obody;
2862 /* Store (clobber REG) for each clobbered register specified. */
2864 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
2866 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
2867 int reg, nregs;
2868 int j = decode_reg_name_and_count (regname, &nregs);
2869 rtx clobbered_reg;
2871 if (j < 0)
2873 if (j == -3) /* `cc', which is not a register */
2874 continue;
2876 if (j == -4) /* `memory', don't cache memory across asm */
2878 XVECEXP (body, 0, i++)
2879 = gen_rtx_CLOBBER (VOIDmode,
2880 gen_rtx_MEM
2881 (BLKmode,
2882 gen_rtx_SCRATCH (VOIDmode)));
2883 continue;
2886 /* Ignore unknown register, error already signaled. */
2887 continue;
2890 for (reg = j; reg < j + nregs; reg++)
2892 /* Use QImode since that's guaranteed to clobber just
2893 * one reg. */
2894 clobbered_reg = gen_rtx_REG (QImode, reg);
2896 /* Do sanity check for overlap between clobbers and
2897 respectively input and outputs that hasn't been
2898 handled. Such overlap should have been detected and
2899 reported above. */
2900 if (!clobber_conflict_found)
2902 int opno;
2904 /* We test the old body (obody) contents to avoid
2905 tripping over the under-construction body. */
2906 for (opno = 0; opno < noutputs; opno++)
2907 if (reg_overlap_mentioned_p (clobbered_reg,
2908 output_rtx[opno]))
2909 internal_error
2910 ("asm clobber conflict with output operand");
2912 for (opno = 0; opno < ninputs - ninout; opno++)
2913 if (reg_overlap_mentioned_p (clobbered_reg,
2914 ASM_OPERANDS_INPUT (obody,
2915 opno)))
2916 internal_error
2917 ("asm clobber conflict with input operand");
2920 XVECEXP (body, 0, i++)
2921 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
2925 if (nlabels > 0)
2926 emit_jump_insn (body);
2927 else
2928 emit_insn (body);
2931 if (fallthru_label)
2932 emit_label (fallthru_label);
2934 /* For any outputs that needed reloading into registers, spill them
2935 back to where they belong. */
2936 for (i = 0; i < noutputs; ++i)
2937 if (real_output_rtx[i])
2938 emit_move_insn (real_output_rtx[i], output_rtx[i]);
2940 crtl->has_asm_statement = 1;
2941 free_temp_slots ();
2945 static void
2946 expand_asm_stmt (gimple stmt)
2948 int noutputs;
2949 tree outputs, tail, t;
2950 tree *o;
2951 size_t i, n;
2952 const char *s;
2953 tree str, out, in, cl, labels;
2954 location_t locus = gimple_location (stmt);
2955 basic_block fallthru_bb = NULL;
2957 /* Meh... convert the gimple asm operands into real tree lists.
2958 Eventually we should make all routines work on the vectors instead
2959 of relying on TREE_CHAIN. */
2960 out = NULL_TREE;
2961 n = gimple_asm_noutputs (stmt);
2962 if (n > 0)
2964 t = out = gimple_asm_output_op (stmt, 0);
2965 for (i = 1; i < n; i++)
2966 t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
2969 in = NULL_TREE;
2970 n = gimple_asm_ninputs (stmt);
2971 if (n > 0)
2973 t = in = gimple_asm_input_op (stmt, 0);
2974 for (i = 1; i < n; i++)
2975 t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
2978 cl = NULL_TREE;
2979 n = gimple_asm_nclobbers (stmt);
2980 if (n > 0)
2982 t = cl = gimple_asm_clobber_op (stmt, 0);
2983 for (i = 1; i < n; i++)
2984 t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
2987 labels = NULL_TREE;
2988 n = gimple_asm_nlabels (stmt);
2989 if (n > 0)
2991 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
2992 if (fallthru)
2993 fallthru_bb = fallthru->dest;
2994 t = labels = gimple_asm_label_op (stmt, 0);
2995 for (i = 1; i < n; i++)
2996 t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
2999 s = gimple_asm_string (stmt);
3000 str = build_string (strlen (s), s);
3002 if (gimple_asm_input_p (stmt))
3004 expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
3005 return;
3008 outputs = out;
3009 noutputs = gimple_asm_noutputs (stmt);
3010 /* o[I] is the place that output number I should be written. */
3011 o = (tree *) alloca (noutputs * sizeof (tree));
3013 /* Record the contents of OUTPUTS before it is modified. */
3014 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3015 o[i] = TREE_VALUE (tail);
3017 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
3018 OUTPUTS some trees for where the values were actually stored. */
3019 expand_asm_operands (str, outputs, in, cl, labels, fallthru_bb,
3020 gimple_asm_volatile_p (stmt), locus);
3022 /* Copy all the intermediate outputs into the specified outputs. */
3023 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
3025 if (o[i] != TREE_VALUE (tail))
3027 expand_assignment (o[i], TREE_VALUE (tail), false);
3028 free_temp_slots ();
3030 /* Restore the original value so that it's correct the next
3031 time we expand this function. */
3032 TREE_VALUE (tail) = o[i];
3037 /* Emit code to jump to the address
3038 specified by the pointer expression EXP. */
3040 static void
3041 expand_computed_goto (tree exp)
3043 rtx x = expand_normal (exp);
3045 x = convert_memory_address (Pmode, x);
3047 do_pending_stack_adjust ();
3048 emit_indirect_jump (x);
3051 /* Generate RTL code for a `goto' statement with target label LABEL.
3052 LABEL should be a LABEL_DECL tree node that was or will later be
3053 defined with `expand_label'. */
3055 static void
3056 expand_goto (tree label)
3058 #ifdef ENABLE_CHECKING
3059 /* Check for a nonlocal goto to a containing function. Should have
3060 gotten translated to __builtin_nonlocal_goto. */
3061 tree context = decl_function_context (label);
3062 gcc_assert (!context || context == current_function_decl);
3063 #endif
3065 emit_jump (label_rtx (label));
3068 /* Output a return with no value. */
3070 static void
3071 expand_null_return_1 (void)
3073 clear_pending_stack_adjust ();
3074 do_pending_stack_adjust ();
3075 emit_jump (return_label);
3078 /* Generate RTL to return from the current function, with no value.
3079 (That is, we do not do anything about returning any value.) */
3081 void
3082 expand_null_return (void)
3084 /* If this function was declared to return a value, but we
3085 didn't, clobber the return registers so that they are not
3086 propagated live to the rest of the function. */
3087 clobber_return_register ();
3089 expand_null_return_1 ();
3092 /* Generate RTL to return from the current function, with value VAL. */
3094 static void
3095 expand_value_return (rtx val)
3097 /* Copy the value to the return location unless it's already there. */
3099 tree decl = DECL_RESULT (current_function_decl);
3100 rtx return_reg = DECL_RTL (decl);
3101 if (return_reg != val)
3103 tree funtype = TREE_TYPE (current_function_decl);
3104 tree type = TREE_TYPE (decl);
3105 int unsignedp = TYPE_UNSIGNED (type);
3106 machine_mode old_mode = DECL_MODE (decl);
3107 machine_mode mode;
3108 if (DECL_BY_REFERENCE (decl))
3109 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3110 else
3111 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3113 if (mode != old_mode)
3114 val = convert_modes (mode, old_mode, val, unsignedp);
3116 if (GET_CODE (return_reg) == PARALLEL)
3117 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3118 else
3119 emit_move_insn (return_reg, val);
3122 expand_null_return_1 ();
3125 /* Generate RTL to evaluate the expression RETVAL and return it
3126 from the current function. */
3128 static void
3129 expand_return (tree retval)
3131 rtx result_rtl;
3132 rtx val = 0;
3133 tree retval_rhs;
3135 /* If function wants no value, give it none. */
3136 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3138 expand_normal (retval);
3139 expand_null_return ();
3140 return;
3143 if (retval == error_mark_node)
3145 /* Treat this like a return of no value from a function that
3146 returns a value. */
3147 expand_null_return ();
3148 return;
3150 else if ((TREE_CODE (retval) == MODIFY_EXPR
3151 || TREE_CODE (retval) == INIT_EXPR)
3152 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3153 retval_rhs = TREE_OPERAND (retval, 1);
3154 else
3155 retval_rhs = retval;
3157 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3159 /* If we are returning the RESULT_DECL, then the value has already
3160 been stored into it, so we don't have to do anything special. */
3161 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3162 expand_value_return (result_rtl);
3164 /* If the result is an aggregate that is being returned in one (or more)
3165 registers, load the registers here. */
3167 else if (retval_rhs != 0
3168 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3169 && REG_P (result_rtl))
3171 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
3172 if (val)
3174 /* Use the mode of the result value on the return register. */
3175 PUT_MODE (result_rtl, GET_MODE (val));
3176 expand_value_return (val);
3178 else
3179 expand_null_return ();
3181 else if (retval_rhs != 0
3182 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3183 && (REG_P (result_rtl)
3184 || (GET_CODE (result_rtl) == PARALLEL)))
3186 /* Compute the return value into a temporary (usually a pseudo reg). */
3188 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3189 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3190 val = force_not_mem (val);
3191 expand_value_return (val);
3193 else
3195 /* No hard reg used; calculate value into hard return reg. */
3196 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3197 expand_value_return (result_rtl);
3201 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3202 STMT that doesn't require special handling for outgoing edges. That
3203 is no tailcalls and no GIMPLE_COND. */
3205 static void
3206 expand_gimple_stmt_1 (gimple stmt)
3208 tree op0;
3210 set_curr_insn_location (gimple_location (stmt));
3212 switch (gimple_code (stmt))
3214 case GIMPLE_GOTO:
3215 op0 = gimple_goto_dest (stmt);
3216 if (TREE_CODE (op0) == LABEL_DECL)
3217 expand_goto (op0);
3218 else
3219 expand_computed_goto (op0);
3220 break;
3221 case GIMPLE_LABEL:
3222 expand_label (gimple_label_label (stmt));
3223 break;
3224 case GIMPLE_NOP:
3225 case GIMPLE_PREDICT:
3226 break;
3227 case GIMPLE_SWITCH:
3228 expand_case (stmt);
3229 break;
3230 case GIMPLE_ASM:
3231 expand_asm_stmt (stmt);
3232 break;
3233 case GIMPLE_CALL:
3234 expand_call_stmt (stmt);
3235 break;
3237 case GIMPLE_RETURN:
3238 op0 = gimple_return_retval (stmt);
3240 if (op0 && op0 != error_mark_node)
3242 tree result = DECL_RESULT (current_function_decl);
3244 /* If we are not returning the current function's RESULT_DECL,
3245 build an assignment to it. */
3246 if (op0 != result)
3248 /* I believe that a function's RESULT_DECL is unique. */
3249 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3251 /* ??? We'd like to use simply expand_assignment here,
3252 but this fails if the value is of BLKmode but the return
3253 decl is a register. expand_return has special handling
3254 for this combination, which eventually should move
3255 to common code. See comments there. Until then, let's
3256 build a modify expression :-/ */
3257 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
3258 result, op0);
3261 if (!op0)
3262 expand_null_return ();
3263 else
3264 expand_return (op0);
3265 break;
3267 case GIMPLE_ASSIGN:
3269 tree lhs = gimple_assign_lhs (stmt);
3271 /* Tree expand used to fiddle with |= and &= of two bitfield
3272 COMPONENT_REFs here. This can't happen with gimple; the LHS
3273 of binary assigns must be a gimple reg. */
3275 if (TREE_CODE (lhs) != SSA_NAME
3276 || get_gimple_rhs_class (gimple_expr_code (stmt))
3277 == GIMPLE_SINGLE_RHS)
3279 tree rhs = gimple_assign_rhs1 (stmt);
3280 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3281 == GIMPLE_SINGLE_RHS);
3282 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
3283 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3284 if (TREE_CLOBBER_P (rhs))
3285 /* This is a clobber marking that this LHS is going
3286 out of scope. */
3288 else
3289 expand_assignment (lhs, rhs,
3290 gimple_assign_nontemporal_move_p (stmt));
3292 else
3294 rtx target, temp;
3295 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
3296 struct separate_ops ops;
3297 bool promoted = false;
3299 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3300 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3301 promoted = true;
3303 ops.code = gimple_assign_rhs_code (stmt);
3304 ops.type = TREE_TYPE (lhs);
3305 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
3307 case GIMPLE_TERNARY_RHS:
3308 ops.op2 = gimple_assign_rhs3 (stmt);
3309 /* Fallthru */
3310 case GIMPLE_BINARY_RHS:
3311 ops.op1 = gimple_assign_rhs2 (stmt);
3312 /* Fallthru */
3313 case GIMPLE_UNARY_RHS:
3314 ops.op0 = gimple_assign_rhs1 (stmt);
3315 break;
3316 default:
3317 gcc_unreachable ();
3319 ops.location = gimple_location (stmt);
3321 /* If we want to use a nontemporal store, force the value to
3322 register first. If we store into a promoted register,
3323 don't directly expand to target. */
3324 temp = nontemporal || promoted ? NULL_RTX : target;
3325 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
3326 EXPAND_NORMAL);
3328 if (temp == target)
3330 else if (promoted)
3332 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3333 /* If TEMP is a VOIDmode constant, use convert_modes to make
3334 sure that we properly convert it. */
3335 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3337 temp = convert_modes (GET_MODE (target),
3338 TYPE_MODE (ops.type),
3339 temp, unsignedp);
3340 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3341 GET_MODE (target), temp, unsignedp);
3344 convert_move (SUBREG_REG (target), temp, unsignedp);
3346 else if (nontemporal && emit_storent_insn (target, temp))
3348 else
3350 temp = force_operand (temp, target);
3351 if (temp != target)
3352 emit_move_insn (target, temp);
3356 break;
3358 default:
3359 gcc_unreachable ();
3363 /* Expand one gimple statement STMT and return the last RTL instruction
3364 before any of the newly generated ones.
3366 In addition to generating the necessary RTL instructions this also
3367 sets REG_EH_REGION notes if necessary and sets the current source
3368 location for diagnostics. */
3370 static rtx_insn *
3371 expand_gimple_stmt (gimple stmt)
3373 location_t saved_location = input_location;
3374 rtx_insn *last = get_last_insn ();
3375 int lp_nr;
3377 gcc_assert (cfun);
3379 /* We need to save and restore the current source location so that errors
3380 discovered during expansion are emitted with the right location. But
3381 it would be better if the diagnostic routines used the source location
3382 embedded in the tree nodes rather than globals. */
3383 if (gimple_has_location (stmt))
3384 input_location = gimple_location (stmt);
3386 expand_gimple_stmt_1 (stmt);
3388 /* Free any temporaries used to evaluate this statement. */
3389 free_temp_slots ();
3391 input_location = saved_location;
3393 /* Mark all insns that may trap. */
3394 lp_nr = lookup_stmt_eh_lp (stmt);
3395 if (lp_nr)
3397 rtx_insn *insn;
3398 for (insn = next_real_insn (last); insn;
3399 insn = next_real_insn (insn))
3401 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3402 /* If we want exceptions for non-call insns, any
3403 may_trap_p instruction may throw. */
3404 && GET_CODE (PATTERN (insn)) != CLOBBER
3405 && GET_CODE (PATTERN (insn)) != USE
3406 && insn_could_throw_p (insn))
3407 make_reg_eh_region_note (insn, 0, lp_nr);
3411 return last;
3414 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3415 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3416 generated a tail call (something that might be denied by the ABI
3417 rules governing the call; see calls.c).
3419 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3420 can still reach the rest of BB. The case here is __builtin_sqrt,
3421 where the NaN result goes through the external function (with a
3422 tailcall) and the normal result happens via a sqrt instruction. */
3424 static basic_block
3425 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
3427 rtx_insn *last2, *last;
3428 edge e;
3429 edge_iterator ei;
3430 int probability;
3431 gcov_type count;
3433 last2 = last = expand_gimple_stmt (stmt);
3435 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3436 if (CALL_P (last) && SIBLING_CALL_P (last))
3437 goto found;
3439 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3441 *can_fallthru = true;
3442 return NULL;
3444 found:
3445 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3446 Any instructions emitted here are about to be deleted. */
3447 do_pending_stack_adjust ();
3449 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3450 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3451 EH or abnormal edges, we shouldn't have created a tail call in
3452 the first place. So it seems to me we should just be removing
3453 all edges here, or redirecting the existing fallthru edge to
3454 the exit block. */
3456 probability = 0;
3457 count = 0;
3459 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3461 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3463 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3465 e->dest->count -= e->count;
3466 e->dest->frequency -= EDGE_FREQUENCY (e);
3467 if (e->dest->count < 0)
3468 e->dest->count = 0;
3469 if (e->dest->frequency < 0)
3470 e->dest->frequency = 0;
3472 count += e->count;
3473 probability += e->probability;
3474 remove_edge (e);
3476 else
3477 ei_next (&ei);
3480 /* This is somewhat ugly: the call_expr expander often emits instructions
3481 after the sibcall (to perform the function return). These confuse the
3482 find_many_sub_basic_blocks code, so we need to get rid of these. */
3483 last = NEXT_INSN (last);
3484 gcc_assert (BARRIER_P (last));
3486 *can_fallthru = false;
3487 while (NEXT_INSN (last))
3489 /* For instance the sqrt builtin expander expands an if with a
3490 sibcall in the then branch and a label for the else branch. */
3491 if (LABEL_P (NEXT_INSN (last)))
3493 *can_fallthru = true;
3494 break;
3496 delete_insn (NEXT_INSN (last));
3499 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
3500 | EDGE_SIBCALL);
3501 e->probability += probability;
3502 e->count += count;
3503 BB_END (bb) = last;
3504 update_bb_for_insn (bb);
3506 if (NEXT_INSN (last))
3508 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
3510 last = BB_END (bb);
3511 if (BARRIER_P (last))
3512 BB_END (bb) = PREV_INSN (last);
3515 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3517 return bb;
3520 /* Return the difference between the floor and the truncated result of
3521 a signed division by OP1 with remainder MOD. */
3522 static rtx
3523 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3525 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3526 return gen_rtx_IF_THEN_ELSE
3527 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3528 gen_rtx_IF_THEN_ELSE
3529 (mode, gen_rtx_LT (BImode,
3530 gen_rtx_DIV (mode, op1, mod),
3531 const0_rtx),
3532 constm1_rtx, const0_rtx),
3533 const0_rtx);
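/* For example, -7 / 2 truncates to -3 with mod == -1; op1 / mod ==
   2 / -1 < 0, so the adjustment is -1 and -3 + -1 == -4 == floor (-3.5).
   When mod == 0 or the signs agree the adjustment is 0.  */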
3536 /* Return the difference between the ceil and the truncated result of
3537 a signed division by OP1 with remainder MOD. */
3538 static rtx
3539 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3541 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3542 return gen_rtx_IF_THEN_ELSE
3543 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3544 gen_rtx_IF_THEN_ELSE
3545 (mode, gen_rtx_GT (BImode,
3546 gen_rtx_DIV (mode, op1, mod),
3547 const0_rtx),
3548 const1_rtx, const0_rtx),
3549 const0_rtx);
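/* For example, 7 / 2 truncates to 3 with mod == 1; op1 / mod == 2 > 0,
   so the adjustment is 1 and 3 + 1 == 4 == ceil (3.5).  With mixed signs
   (e.g. -7 / 2) op1 / mod is negative and the adjustment is 0, since
   truncation already equals the ceiling.  */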
3552 /* Return the difference between the ceil and the truncated result of
3553 an unsigned division by OP1 with remainder MOD. */
3554 static rtx
3555 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3557 /* (mod != 0 ? 1 : 0) */
3558 return gen_rtx_IF_THEN_ELSE
3559 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3560 const1_rtx, const0_rtx);
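/* E.g. 7u / 2u truncates to 3 with mod == 1, so the adjustment is 1 and
   the ceiling is 4; exact divisions (mod == 0) get no adjustment.  */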
3563 /* Return the difference between the rounded and the truncated result
3564 of a signed division by OP1 with remainder MOD. Halfway cases are
3565 rounded away from zero, rather than to the nearest even number. */
3566 static rtx
3567 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3569 /* (abs (mod) >= abs (op1) - abs (mod)
3570 ? (op1 / mod > 0 ? 1 : -1)
3571 : 0) */
3572 return gen_rtx_IF_THEN_ELSE
3573 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3574 gen_rtx_MINUS (mode,
3575 gen_rtx_ABS (mode, op1),
3576 gen_rtx_ABS (mode, mod))),
3577 gen_rtx_IF_THEN_ELSE
3578 (mode, gen_rtx_GT (BImode,
3579 gen_rtx_DIV (mode, op1, mod),
3580 const0_rtx),
3581 const1_rtx, constm1_rtx),
3582 const0_rtx);
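/* For example, 7 / 2 gives mod == 1 and abs (op1) - abs (mod) == 1, so
   1 >= 1 holds and the adjustment is +1: 3 + 1 == 4 == round (3.5).
   For 9 / 4, mod == 1 but 1 >= 3 fails, so the truncated 2 is already
   the rounded result.  */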
3585 /* Return the difference between the rounded and the truncated result
3586 of an unsigned division by OP1 with remainder MOD. Halfway cases
3587 are rounded away from zero, rather than to the nearest even
3588 number. */
3589 static rtx
3590 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3592 /* (mod >= op1 - mod ? 1 : 0) */
3593 return gen_rtx_IF_THEN_ELSE
3594 (mode, gen_rtx_GE (BImode, mod,
3595 gen_rtx_MINUS (mode, op1, mod)),
3596 const1_rtx, const0_rtx);
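/* E.g. 7u / 2u has mod == 1 >= op1 - mod == 1, so the truncated 3 is
   bumped to 4; for 9u / 4u, 1 >= 3 fails and the truncated 2 stays.  */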
3599 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
3600 any rtl. */
3602 static rtx
3603 convert_debug_memory_address (machine_mode mode, rtx x,
3604 addr_space_t as)
3606 machine_mode xmode = GET_MODE (x);
3608 #ifndef POINTERS_EXTEND_UNSIGNED
3609 gcc_assert (mode == Pmode
3610 || mode == targetm.addr_space.address_mode (as));
3611 gcc_assert (xmode == mode || xmode == VOIDmode);
3612 #else
3613 rtx temp;
3615 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3617 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3618 return x;
3620 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3621 x = simplify_gen_subreg (mode, x, xmode,
3622 subreg_lowpart_offset
3623 (mode, xmode));
3624 else if (POINTERS_EXTEND_UNSIGNED > 0)
3625 x = gen_rtx_ZERO_EXTEND (mode, x);
3626 else if (!POINTERS_EXTEND_UNSIGNED)
3627 x = gen_rtx_SIGN_EXTEND (mode, x);
3628 else
3630 switch (GET_CODE (x))
3632 case SUBREG:
3633 if ((SUBREG_PROMOTED_VAR_P (x)
3634 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3635 || (GET_CODE (SUBREG_REG (x)) == PLUS
3636 && REG_P (XEXP (SUBREG_REG (x), 0))
3637 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3638 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3639 && GET_MODE (SUBREG_REG (x)) == mode)
3640 return SUBREG_REG (x);
3641 break;
3642 case LABEL_REF:
3643 temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
3644 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
3645 return temp;
3646 case SYMBOL_REF:
3647 temp = shallow_copy_rtx (x);
3648 PUT_MODE (temp, mode);
3649 return temp;
3650 case CONST:
3651 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3652 if (temp)
3653 temp = gen_rtx_CONST (mode, temp);
3654 return temp;
3655 case PLUS:
3656 case MINUS:
3657 if (CONST_INT_P (XEXP (x, 1)))
3659 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3660 if (temp)
3661 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3663 break;
3664 default:
3665 break;
3667 /* Don't know how to express ptr_extend as operation in debug info. */
3668 return NULL;
3670 #endif /* POINTERS_EXTEND_UNSIGNED */
3672 return x;
3675 /* Return an RTX equivalent to the value of the parameter DECL. */
3677 static rtx
3678 expand_debug_parm_decl (tree decl)
3680 rtx incoming = DECL_INCOMING_RTL (decl);
3682 if (incoming
3683 && GET_MODE (incoming) != BLKmode
3684 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
3685 || (MEM_P (incoming)
3686 && REG_P (XEXP (incoming, 0))
3687 && HARD_REGISTER_P (XEXP (incoming, 0)))))
3689 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
3691 #ifdef HAVE_window_save
3692 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
3693 If the target machine has an explicit window save instruction, the
3694 actual entry value is the corresponding OUTGOING_REGNO instead. */
3695 if (REG_P (incoming)
3696 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
3697 incoming
3698 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
3699 OUTGOING_REGNO (REGNO (incoming)), 0);
3700 else if (MEM_P (incoming))
3702 rtx reg = XEXP (incoming, 0);
3703 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
3705 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
3706 incoming = replace_equiv_address_nv (incoming, reg);
3708 else
3709 incoming = copy_rtx (incoming);
3711 #endif
3713 ENTRY_VALUE_EXP (rtl) = incoming;
3714 return rtl;
3717 if (incoming
3718 && GET_MODE (incoming) != BLKmode
3719 && !TREE_ADDRESSABLE (decl)
3720 && MEM_P (incoming)
3721 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
3722 || (GET_CODE (XEXP (incoming, 0)) == PLUS
3723 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
3724 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
3725 return copy_rtx (incoming);
3727 return NULL_RTX;
3730 /* Return an RTX equivalent to the value of the tree expression EXP. */
3732 static rtx
3733 expand_debug_expr (tree exp)
3735 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
3736 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3737 machine_mode inner_mode = VOIDmode;
3738 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3739 addr_space_t as;
3741 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3743 case tcc_expression:
3744 switch (TREE_CODE (exp))
3746 case COND_EXPR:
3747 case DOT_PROD_EXPR:
3748 case SAD_EXPR:
3749 case WIDEN_MULT_PLUS_EXPR:
3750 case WIDEN_MULT_MINUS_EXPR:
3751 case FMA_EXPR:
3752 goto ternary;
3754 case TRUTH_ANDIF_EXPR:
3755 case TRUTH_ORIF_EXPR:
3756 case TRUTH_AND_EXPR:
3757 case TRUTH_OR_EXPR:
3758 case TRUTH_XOR_EXPR:
3759 goto binary;
3761 case TRUTH_NOT_EXPR:
3762 goto unary;
3764 default:
3765 break;
3767 break;
3769 ternary:
3770 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
3771 if (!op2)
3772 return NULL_RTX;
3773 /* Fall through. */
3775 binary:
3776 case tcc_binary:
3777 case tcc_comparison:
3778 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3779 if (!op1)
3780 return NULL_RTX;
3781 /* Fall through. */
3783 unary:
3784 case tcc_unary:
3785 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3786 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3787 if (!op0)
3788 return NULL_RTX;
3789 break;
3791 case tcc_type:
3792 case tcc_statement:
3793 gcc_unreachable ();
3795 case tcc_constant:
3796 case tcc_exceptional:
3797 case tcc_declaration:
3798 case tcc_reference:
3799 case tcc_vl_exp:
3800 break;
3803 switch (TREE_CODE (exp))
3805 case STRING_CST:
3806 if (!lookup_constant_def (exp))
3808 if (strlen (TREE_STRING_POINTER (exp)) + 1
3809 != (size_t) TREE_STRING_LENGTH (exp))
3810 return NULL_RTX;
3811 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
3812 op0 = gen_rtx_MEM (BLKmode, op0);
3813 set_mem_attributes (op0, exp, 0);
3814 return op0;
3816 /* Fall through... */
3818 case INTEGER_CST:
3819 case REAL_CST:
3820 case FIXED_CST:
3821 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
3822 return op0;
3824 case COMPLEX_CST:
3825 gcc_assert (COMPLEX_MODE_P (mode));
3826 op0 = expand_debug_expr (TREE_REALPART (exp));
3827 op1 = expand_debug_expr (TREE_IMAGPART (exp));
3828 return gen_rtx_CONCAT (mode, op0, op1);
3830 case DEBUG_EXPR_DECL:
3831 op0 = DECL_RTL_IF_SET (exp);
3833 if (op0)
3834 return op0;
3836 op0 = gen_rtx_DEBUG_EXPR (mode);
3837 DEBUG_EXPR_TREE_DECL (op0) = exp;
3838 SET_DECL_RTL (exp, op0);
3840 return op0;
3842 case VAR_DECL:
3843 case PARM_DECL:
3844 case FUNCTION_DECL:
3845 case LABEL_DECL:
3846 case CONST_DECL:
3847 case RESULT_DECL:
3848 op0 = DECL_RTL_IF_SET (exp);
3850 /* This decl was probably optimized away. */
3851 if (!op0)
3853 if (TREE_CODE (exp) != VAR_DECL
3854 || DECL_EXTERNAL (exp)
3855 || !TREE_STATIC (exp)
3856 || !DECL_NAME (exp)
3857 || DECL_HARD_REGISTER (exp)
3858 || DECL_IN_CONSTANT_POOL (exp)
3859 || mode == VOIDmode)
3860 return NULL;
3862 op0 = make_decl_rtl_for_debug (exp);
3863 if (!MEM_P (op0)
3864 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
3865 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
3866 return NULL;
3868 else
3869 op0 = copy_rtx (op0);
3871 if (GET_MODE (op0) == BLKmode
3872 /* If op0 is not BLKmode, but MODE is, adjust_mode
3873 below would ICE. While it is likely a FE bug,
3874 try to be robust here. See PR43166. */
3875 || mode == BLKmode
3876 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
3878 gcc_assert (MEM_P (op0));
3879 op0 = adjust_address_nv (op0, mode, 0);
3880 return op0;
3883 /* Fall through. */
3885 adjust_mode:
3886 case PAREN_EXPR:
3887 CASE_CONVERT:
3889 inner_mode = GET_MODE (op0);
3891 if (mode == inner_mode)
3892 return op0;
3894 if (inner_mode == VOIDmode)
3896 if (TREE_CODE (exp) == SSA_NAME)
3897 inner_mode = TYPE_MODE (TREE_TYPE (exp));
3898 else
3899 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3900 if (mode == inner_mode)
3901 return op0;
3904 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3906 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3907 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3908 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3909 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3910 else
3911 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3913 else if (FLOAT_MODE_P (mode))
3915 gcc_assert (TREE_CODE (exp) != SSA_NAME);
3916 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3917 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
3918 else
3919 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
3921 else if (FLOAT_MODE_P (inner_mode))
3923 if (unsignedp)
3924 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3925 else
3926 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3928 else if (CONSTANT_P (op0)
3929 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
3930 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3931 subreg_lowpart_offset (mode,
3932 inner_mode));
3933 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
3934 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
3935 : unsignedp)
3936 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3937 else
3938 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3940 return op0;
3943 case MEM_REF:
3944 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3946 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
3947 TREE_OPERAND (exp, 0),
3948 TREE_OPERAND (exp, 1));
3949 if (newexp)
3950 return expand_debug_expr (newexp);
3952 /* FALLTHROUGH */
3953 case INDIRECT_REF:
3954 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
3955 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3956 if (!op0)
3957 return NULL;
3959 if (TREE_CODE (exp) == MEM_REF)
3961 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
3962 || (GET_CODE (op0) == PLUS
3963 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
3964 /* (mem (debug_implicit_ptr)) might confuse aliasing.
3965 Instead just use get_inner_reference. */
3966 goto component_ref;
3968 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
3969 if (!op1 || !CONST_INT_P (op1))
3970 return NULL;
3972 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
3975 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
3977 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
3978 op0, as);
3979 if (op0 == NULL_RTX)
3980 return NULL;
3982 op0 = gen_rtx_MEM (mode, op0);
3983 set_mem_attributes (op0, exp, 0);
3984 if (TREE_CODE (exp) == MEM_REF
3985 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
3986 set_mem_expr (op0, NULL_TREE);
3987 set_mem_addr_space (op0, as);
3989 return op0;
3991 case TARGET_MEM_REF:
3992 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
3993 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
3994 return NULL;
3996 op0 = expand_debug_expr
3997 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
3998 if (!op0)
3999 return NULL;
4001 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4002 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4003 op0, as);
4004 if (op0 == NULL_RTX)
4005 return NULL;
4007 op0 = gen_rtx_MEM (mode, op0);
4009 set_mem_attributes (op0, exp, 0);
4010 set_mem_addr_space (op0, as);
4012 return op0;
4014 component_ref:
4015 case ARRAY_REF:
4016 case ARRAY_RANGE_REF:
4017 case COMPONENT_REF:
4018 case BIT_FIELD_REF:
4019 case REALPART_EXPR:
4020 case IMAGPART_EXPR:
4021 case VIEW_CONVERT_EXPR:
4023 machine_mode mode1;
4024 HOST_WIDE_INT bitsize, bitpos;
4025 tree offset;
4026 int volatilep = 0;
4027 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
4028 &mode1, &unsignedp, &volatilep, false);
4029 rtx orig_op0;
4031 if (bitsize == 0)
4032 return NULL;
4034 orig_op0 = op0 = expand_debug_expr (tem);
4036 if (!op0)
4037 return NULL;
4039 if (offset)
4041 machine_mode addrmode, offmode;
4043 if (!MEM_P (op0))
4044 return NULL;
4046 op0 = XEXP (op0, 0);
4047 addrmode = GET_MODE (op0);
4048 if (addrmode == VOIDmode)
4049 addrmode = Pmode;
4051 op1 = expand_debug_expr (offset);
4052 if (!op1)
4053 return NULL;
4055 offmode = GET_MODE (op1);
4056 if (offmode == VOIDmode)
4057 offmode = TYPE_MODE (TREE_TYPE (offset));
4059 if (addrmode != offmode)
4060 op1 = simplify_gen_subreg (addrmode, op1, offmode,
4061 subreg_lowpart_offset (addrmode,
4062 offmode));
4064 /* Don't use offset_address here, we don't need a
4065 recognizable address, and we don't want to generate
4066 code. */
4067 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4068 op0, op1));
4071 if (MEM_P (op0))
4073 if (mode1 == VOIDmode)
4074 /* Bitfield. */
4075 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4076 if (bitpos >= BITS_PER_UNIT)
4078 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4079 bitpos %= BITS_PER_UNIT;
4081 else if (bitpos < 0)
4083 HOST_WIDE_INT units
4084 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4085 op0 = adjust_address_nv (op0, mode1, units);
4086 bitpos += units * BITS_PER_UNIT;
4088 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4089 op0 = adjust_address_nv (op0, mode, 0);
4090 else if (GET_MODE (op0) != mode1)
4091 op0 = adjust_address_nv (op0, mode1, 0);
4092 else
4093 op0 = copy_rtx (op0);
4094 if (op0 == orig_op0)
4095 op0 = shallow_copy_rtx (op0);
4096 set_mem_attributes (op0, exp, 0);
4099 if (bitpos == 0 && mode == GET_MODE (op0))
4100 return op0;
4102 if (bitpos < 0)
4103 return NULL;
4105 if (GET_MODE (op0) == BLKmode)
4106 return NULL;
4108 if ((bitpos % BITS_PER_UNIT) == 0
4109 && bitsize == GET_MODE_BITSIZE (mode1))
4111 machine_mode opmode = GET_MODE (op0);
4113 if (opmode == VOIDmode)
4114 opmode = TYPE_MODE (TREE_TYPE (tem));
4116 /* This condition may hold if we're expanding the address
4117 right past the end of an array that turned out not to
4118 be addressable (i.e., the address was only computed in
4119 debug stmts). The gen_subreg below would rightfully
4120 crash, and the address doesn't really exist, so just
4121 drop it. */
4122 if (bitpos >= GET_MODE_BITSIZE (opmode))
4123 return NULL;
4125 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4126 return simplify_gen_subreg (mode, op0, opmode,
4127 bitpos / BITS_PER_UNIT);
4130 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4131 && TYPE_UNSIGNED (TREE_TYPE (exp))
4132 ? SIGN_EXTRACT
4133 : ZERO_EXTRACT, mode,
4134 GET_MODE (op0) != VOIDmode
4135 ? GET_MODE (op0)
4136 : TYPE_MODE (TREE_TYPE (tem)),
4137 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4140 case ABS_EXPR:
4141 return simplify_gen_unary (ABS, mode, op0, mode);
4143 case NEGATE_EXPR:
4144 return simplify_gen_unary (NEG, mode, op0, mode);
4146 case BIT_NOT_EXPR:
4147 return simplify_gen_unary (NOT, mode, op0, mode);
4149 case FLOAT_EXPR:
4150 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4151 0)))
4152 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4153 inner_mode);
4155 case FIX_TRUNC_EXPR:
4156 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4157 inner_mode);
4159 case POINTER_PLUS_EXPR:
4160 /* For the rare target where pointers are not the same size as
4161 size_t, we need to check for mis-matched modes and correct
4162 the addend. */
4163 if (op0 && op1
4164 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4165 && GET_MODE (op0) != GET_MODE (op1))
4167 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4168 /* If OP0 is a partial mode, then we must truncate, even if it has
4169 the same bitsize as OP1, as GCC's representation of partial modes
4170 is opaque. */
4171 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4172 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4173 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4174 GET_MODE (op1));
4175 else
4176 /* We always sign-extend, regardless of the signedness of
4177 the operand, because the operand is always unsigned
4178 here even if the original C expression is signed. */
4179 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4180 GET_MODE (op1));
4182 /* Fall through. */
4183 case PLUS_EXPR:
4184 return simplify_gen_binary (PLUS, mode, op0, op1);
4186 case MINUS_EXPR:
4187 return simplify_gen_binary (MINUS, mode, op0, op1);
4189 case MULT_EXPR:
4190 return simplify_gen_binary (MULT, mode, op0, op1);
4192 case RDIV_EXPR:
4193 case TRUNC_DIV_EXPR:
4194 case EXACT_DIV_EXPR:
4195 if (unsignedp)
4196 return simplify_gen_binary (UDIV, mode, op0, op1);
4197 else
4198 return simplify_gen_binary (DIV, mode, op0, op1);
4200 case TRUNC_MOD_EXPR:
4201 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
4203 case FLOOR_DIV_EXPR:
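/* Signed floor division is rebuilt from the truncating DIV/MOD pair plus an
   adjustment, which is -1 exactly when the remainder is nonzero and has the
   opposite sign from the divisor.  E.g. -7 / 2: DIV gives -3, MOD gives -1,
   the adjustment is -1, so the result is -4.  */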
4204 if (unsignedp)
4205 return simplify_gen_binary (UDIV, mode, op0, op1);
4206 else
4208 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4209 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4210 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4211 return simplify_gen_binary (PLUS, mode, div, adj);
4214 case FLOOR_MOD_EXPR:
4215 if (unsignedp)
4216 return simplify_gen_binary (UMOD, mode, op0, op1);
4217 else
4219 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4220 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4221 adj = simplify_gen_unary (NEG, mode,
4222 simplify_gen_binary (MULT, mode, adj, op1),
4223 mode);
4224 return simplify_gen_binary (PLUS, mode, mod, adj);
4227 case CEIL_DIV_EXPR:
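/* Ceiling division: truncating division plus 1 whenever the remainder is
   nonzero (and, in the signed case, dividend and divisor have the same
   sign).  E.g. unsigned 7 / 2: UDIV gives 3, UMOD gives 1, the adjustment
   is 1, so the result is 4.  */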
4228 if (unsignedp)
4230 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4231 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4232 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4233 return simplify_gen_binary (PLUS, mode, div, adj);
4235 else
4237 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4238 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4239 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4240 return simplify_gen_binary (PLUS, mode, div, adj);
4243 case CEIL_MOD_EXPR:
4244 if (unsignedp)
4246 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4247 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4248 adj = simplify_gen_unary (NEG, mode,
4249 simplify_gen_binary (MULT, mode, adj, op1),
4250 mode);
4251 return simplify_gen_binary (PLUS, mode, mod, adj);
4253 else
4255 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4256 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4257 adj = simplify_gen_unary (NEG, mode,
4258 simplify_gen_binary (MULT, mode, adj, op1),
4259 mode);
4260 return simplify_gen_binary (PLUS, mode, mod, adj);
4263 case ROUND_DIV_EXPR:
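/* Division rounding to the nearest integer: the truncating quotient is
   corrected by the round_*div_adjust helpers when the remainder is large
   enough.  E.g. unsigned 8 / 3: UDIV gives 2, UMOD gives 2; the remainder
   is more than half the divisor, so the adjustment is 1, giving 3.  */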
4264 if (unsignedp)
4266 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4267 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4268 rtx adj = round_udiv_adjust (mode, mod, op1);
4269 return simplify_gen_binary (PLUS, mode, div, adj);
4271 else
4273 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4274 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4275 rtx adj = round_sdiv_adjust (mode, mod, op1);
4276 return simplify_gen_binary (PLUS, mode, div, adj);
4279 case ROUND_MOD_EXPR:
4280 if (unsignedp)
4282 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4283 rtx adj = round_udiv_adjust (mode, mod, op1);
4284 adj = simplify_gen_unary (NEG, mode,
4285 simplify_gen_binary (MULT, mode, adj, op1),
4286 mode);
4287 return simplify_gen_binary (PLUS, mode, mod, adj);
4289 else
4291 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4292 rtx adj = round_sdiv_adjust (mode, mod, op1);
4293 adj = simplify_gen_unary (NEG, mode,
4294 simplify_gen_binary (MULT, mode, adj, op1),
4295 mode);
4296 return simplify_gen_binary (PLUS, mode, mod, adj);
4299 case LSHIFT_EXPR:
4300 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4302 case RSHIFT_EXPR:
4303 if (unsignedp)
4304 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4305 else
4306 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4308 case LROTATE_EXPR:
4309 return simplify_gen_binary (ROTATE, mode, op0, op1);
4311 case RROTATE_EXPR:
4312 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4314 case MIN_EXPR:
4315 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4317 case MAX_EXPR:
4318 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4320 case BIT_AND_EXPR:
4321 case TRUTH_AND_EXPR:
4322 return simplify_gen_binary (AND, mode, op0, op1);
4324 case BIT_IOR_EXPR:
4325 case TRUTH_OR_EXPR:
4326 return simplify_gen_binary (IOR, mode, op0, op1);
4328 case BIT_XOR_EXPR:
4329 case TRUTH_XOR_EXPR:
4330 return simplify_gen_binary (XOR, mode, op0, op1);
4332 case TRUTH_ANDIF_EXPR:
4333 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4335 case TRUTH_ORIF_EXPR:
4336 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4338 case TRUTH_NOT_EXPR:
4339 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4341 case LT_EXPR:
4342 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4343 op0, op1);
4345 case LE_EXPR:
4346 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4347 op0, op1);
4349 case GT_EXPR:
4350 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4351 op0, op1);
4353 case GE_EXPR:
4354 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4355 op0, op1);
4357 case EQ_EXPR:
4358 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4360 case NE_EXPR:
4361 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4363 case UNORDERED_EXPR:
4364 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4366 case ORDERED_EXPR:
4367 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4369 case UNLT_EXPR:
4370 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4372 case UNLE_EXPR:
4373 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4375 case UNGT_EXPR:
4376 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4378 case UNGE_EXPR:
4379 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4381 case UNEQ_EXPR:
4382 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4384 case LTGT_EXPR:
4385 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4387 case COND_EXPR:
4388 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4390 case COMPLEX_EXPR:
4391 gcc_assert (COMPLEX_MODE_P (mode));
4392 if (GET_MODE (op0) == VOIDmode)
4393 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4394 if (GET_MODE (op1) == VOIDmode)
4395 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4396 return gen_rtx_CONCAT (mode, op0, op1);
4398 case CONJ_EXPR:
4399 if (GET_CODE (op0) == CONCAT)
4400 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4401 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4402 XEXP (op0, 1),
4403 GET_MODE_INNER (mode)));
4404 else
4406 machine_mode imode = GET_MODE_INNER (mode);
4407 rtx re, im;
4409 if (MEM_P (op0))
4411 re = adjust_address_nv (op0, imode, 0);
4412 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4414 else
4416 machine_mode ifmode = int_mode_for_mode (mode);
4417 machine_mode ihmode = int_mode_for_mode (imode);
4418 rtx halfsize;
4419 if (ifmode == BLKmode || ihmode == BLKmode)
4420 return NULL;
4421 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4422 re = op0;
4423 if (mode != ifmode)
4424 re = gen_rtx_SUBREG (ifmode, re, 0);
4425 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4426 if (imode != ihmode)
4427 re = gen_rtx_SUBREG (imode, re, 0);
4428 im = copy_rtx (op0);
4429 if (mode != ifmode)
4430 im = gen_rtx_SUBREG (ifmode, im, 0);
4431 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4432 if (imode != ihmode)
4433 im = gen_rtx_SUBREG (imode, im, 0);
4435 im = gen_rtx_NEG (imode, im);
4436 return gen_rtx_CONCAT (mode, re, im);
4439 case ADDR_EXPR:
4440 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4441 if (!op0 || !MEM_P (op0))
4443 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4444 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4445 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4446 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4447 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4448 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4450 if (handled_component_p (TREE_OPERAND (exp, 0)))
4452 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4453 tree decl
4454 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
4455 &bitoffset, &bitsize, &maxsize);
4456 if ((TREE_CODE (decl) == VAR_DECL
4457 || TREE_CODE (decl) == PARM_DECL
4458 || TREE_CODE (decl) == RESULT_DECL)
4459 && (!TREE_ADDRESSABLE (decl)
4460 || target_for_debug_bind (decl))
4461 && (bitoffset % BITS_PER_UNIT) == 0
4462 && bitsize > 0
4463 && bitsize == maxsize)
4465 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4466 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4470 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4471 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4472 == ADDR_EXPR)
4474 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4475 0));
4476 if (op0 != NULL
4477 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4478 || (GET_CODE (op0) == PLUS
4479 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4480 && CONST_INT_P (XEXP (op0, 1)))))
4482 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4483 1));
4484 if (!op1 || !CONST_INT_P (op1))
4485 return NULL;
4487 return plus_constant (mode, op0, INTVAL (op1));
4491 return NULL;
4494 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4495 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4497 return op0;
4499 case VECTOR_CST:
4501 unsigned i;
4503 op0 = gen_rtx_CONCATN
4504 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4506 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4508 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4509 if (!op1)
4510 return NULL;
4511 XVECEXP (op0, 0, i) = op1;
4514 return op0;
4517 case CONSTRUCTOR:
4518 if (TREE_CLOBBER_P (exp))
4519 return NULL;
4520 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4522 unsigned i;
4523 tree val;
4525 op0 = gen_rtx_CONCATN
4526 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4528 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4530 op1 = expand_debug_expr (val);
4531 if (!op1)
4532 return NULL;
4533 XVECEXP (op0, 0, i) = op1;
4536 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4538 op1 = expand_debug_expr
4539 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4541 if (!op1)
4542 return NULL;
4544 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4545 XVECEXP (op0, 0, i) = op1;
4548 return op0;
4550 else
4551 goto flag_unsupported;
4553 case CALL_EXPR:
4554 /* ??? Maybe handle some builtins? */
4555 return NULL;
4557 case SSA_NAME:
4559 gimple g = get_gimple_for_ssa_name (exp);
4560 if (g)
4562 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
4563 if (!op0)
4564 return NULL;
4566 else
4568 int part = var_to_partition (SA.map, exp);
4570 if (part == NO_PARTITION)
4572 /* If this is a reference to an incoming value of a parameter
4573 that is never used in the code, or where the incoming
4574 value is never used in the code, use the PARM_DECL's
4575 DECL_RTL if set. */
4576 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4577 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
4579 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4580 if (op0)
4581 goto adjust_mode;
4582 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4583 if (op0)
4584 goto adjust_mode;
4586 return NULL;
4589 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4591 op0 = copy_rtx (SA.partition_to_pseudo[part]);
4593 goto adjust_mode;
4596 case ERROR_MARK:
4597 return NULL;
4599 /* Vector stuff. For most of these tree codes we don't have corresponding rtl codes. */
4600 case REALIGN_LOAD_EXPR:
4601 case REDUC_MAX_EXPR:
4602 case REDUC_MIN_EXPR:
4603 case REDUC_PLUS_EXPR:
4604 case VEC_COND_EXPR:
4605 case VEC_PACK_FIX_TRUNC_EXPR:
4606 case VEC_PACK_SAT_EXPR:
4607 case VEC_PACK_TRUNC_EXPR:
4608 case VEC_RSHIFT_EXPR:
4609 case VEC_UNPACK_FLOAT_HI_EXPR:
4610 case VEC_UNPACK_FLOAT_LO_EXPR:
4611 case VEC_UNPACK_HI_EXPR:
4612 case VEC_UNPACK_LO_EXPR:
4613 case VEC_WIDEN_MULT_HI_EXPR:
4614 case VEC_WIDEN_MULT_LO_EXPR:
4615 case VEC_WIDEN_MULT_EVEN_EXPR:
4616 case VEC_WIDEN_MULT_ODD_EXPR:
4617 case VEC_WIDEN_LSHIFT_HI_EXPR:
4618 case VEC_WIDEN_LSHIFT_LO_EXPR:
4619 case VEC_PERM_EXPR:
4620 return NULL;
4622 /* Misc codes. */
4623 case ADDR_SPACE_CONVERT_EXPR:
4624 case FIXED_CONVERT_EXPR:
4625 case OBJ_TYPE_REF:
4626 case WITH_SIZE_EXPR:
4627 return NULL;
4629 case DOT_PROD_EXPR:
4630 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4631 && SCALAR_INT_MODE_P (mode))
4633 op0
4634 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4635 0)))
4636 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4637 inner_mode);
4638 op1
4639 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4640 1)))
4641 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
4642 inner_mode);
4643 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4644 return simplify_gen_binary (PLUS, mode, op0, op2);
4646 return NULL;
4648 case WIDEN_MULT_EXPR:
4649 case WIDEN_MULT_PLUS_EXPR:
4650 case WIDEN_MULT_MINUS_EXPR:
4651 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4652 && SCALAR_INT_MODE_P (mode))
4654 inner_mode = GET_MODE (op0);
4655 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4656 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4657 else
4658 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4659 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
4660 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
4661 else
4662 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
4663 op0 = simplify_gen_binary (MULT, mode, op0, op1);
4664 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
4665 return op0;
4666 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
4667 return simplify_gen_binary (PLUS, mode, op0, op2);
4668 else
4669 return simplify_gen_binary (MINUS, mode, op2, op0);
4671 return NULL;
4673 case MULT_HIGHPART_EXPR:
4674 /* ??? Similar to the above. */
4675 return NULL;
4677 case WIDEN_SUM_EXPR:
4678 case WIDEN_LSHIFT_EXPR:
4679 if (SCALAR_INT_MODE_P (GET_MODE (op0))
4680 && SCALAR_INT_MODE_P (mode))
4682 op0
4683 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4684 0)))
4685 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
4686 inner_mode);
4687 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
4688 ? ASHIFT : PLUS, mode, op0, op1);
4690 return NULL;
4692 case FMA_EXPR:
4693 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
4695 default:
4696 flag_unsupported:
4697 #ifdef ENABLE_CHECKING
4698 debug_tree (exp);
4699 gcc_unreachable ();
4700 #else
4701 return NULL;
4702 #endif
4706 /* Return an RTX equivalent to the source bind value of the tree expression
4707 EXP. */
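/* This is used for debug source binds (VAR_INIT_STATUS_UNINITIALIZED),
   i.e. references to the incoming value of a parameter.  If the parameter
   was optimized away completely, it may still be representable as a
   DEBUG_PARAMETER_REF via the decl_debug_args table.  */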
4709 static rtx
4710 expand_debug_source_expr (tree exp)
4712 rtx op0 = NULL_RTX;
4713 machine_mode mode = VOIDmode, inner_mode;
4715 switch (TREE_CODE (exp))
4717 case PARM_DECL:
4719 mode = DECL_MODE (exp);
4720 op0 = expand_debug_parm_decl (exp);
4721 if (op0)
4722 break;
4723 /* See if this isn't an argument that has been completely
4724 optimized out. */
4725 if (!DECL_RTL_SET_P (exp)
4726 && !DECL_INCOMING_RTL (exp)
4727 && DECL_ABSTRACT_ORIGIN (current_function_decl))
4729 tree aexp = DECL_ORIGIN (exp);
4730 if (DECL_CONTEXT (aexp)
4731 == DECL_ABSTRACT_ORIGIN (current_function_decl))
4733 vec<tree, va_gc> **debug_args;
4734 unsigned int ix;
4735 tree ddecl;
4736 debug_args = decl_debug_args_lookup (current_function_decl);
4737 if (debug_args != NULL)
4739 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
4740 ix += 2)
4741 if (ddecl == aexp)
4742 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
4746 break;
4748 default:
4749 break;
4752 if (op0 == NULL_RTX)
4753 return NULL_RTX;
4755 inner_mode = GET_MODE (op0);
4756 if (mode == inner_mode)
4757 return op0;
4759 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4761 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4762 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4763 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4764 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4765 else
4766 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4768 else if (FLOAT_MODE_P (mode))
4769 gcc_unreachable ();
4770 else if (FLOAT_MODE_P (inner_mode))
4772 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4773 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4774 else
4775 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4777 else if (CONSTANT_P (op0)
4778 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
4779 op0 = simplify_gen_subreg (mode, op0, inner_mode,
4780 subreg_lowpart_offset (mode, inner_mode));
4781 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
4782 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4783 else
4784 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4786 return op0;
4789 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
4790 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
4791 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
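/* In other words, once the recursion reaches depth 4 the subexpression at
   that point is pulled out into a fresh DEBUG_EXPR, a debug bind for it is
   emitted just before INSN, and the original location refers to the new
   DEBUG_EXPR instead, keeping every individual location shallow.  */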
4793 static void
4794 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
4796 rtx exp = *exp_p;
4798 if (exp == NULL_RTX)
4799 return;
4801 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
4802 return;
4804 if (depth == 4)
4806 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
4807 rtx dval = make_debug_expr_from_rtl (exp);
4809 /* Emit a debug bind insn before INSN. */
4810 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
4811 DEBUG_EXPR_TREE_DECL (dval), exp,
4812 VAR_INIT_STATUS_INITIALIZED);
4814 emit_debug_insn_before (bind, insn);
4815 *exp_p = dval;
4816 return;
4819 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
4820 int i, j;
4821 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
4822 switch (*format_ptr++)
4824 case 'e':
4825 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
4826 break;
4828 case 'E':
4829 case 'V':
4830 for (j = 0; j < XVECLEN (exp, i); j++)
4831 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
4832 break;
4834 default:
4835 break;
4839 /* Expand the _LOCs in debug insns. We run this after expanding all
4840 regular insns, so that any variables referenced in the function
4841 will have their DECL_RTLs set. */
4843 static void
4844 expand_debug_locations (void)
4846 rtx_insn *insn;
4847 rtx_insn *last = get_last_insn ();
4848 int save_strict_alias = flag_strict_aliasing;
4850 /* New alias sets while setting up memory attributes cause
4851 -fcompare-debug failures, even though they don't bring about any
4852 codegen changes. */
4853 flag_strict_aliasing = 0;
4855 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4856 if (DEBUG_INSN_P (insn))
4858 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
4859 rtx val;
4860 rtx_insn *prev_insn, *insn2;
4861 machine_mode mode;
4863 if (value == NULL_TREE)
4864 val = NULL_RTX;
4865 else
4867 if (INSN_VAR_LOCATION_STATUS (insn)
4868 == VAR_INIT_STATUS_UNINITIALIZED)
4869 val = expand_debug_source_expr (value);
4870 else
4871 val = expand_debug_expr (value);
4872 gcc_assert (last == get_last_insn ());
4875 if (!val)
4876 val = gen_rtx_UNKNOWN_VAR_LOC ();
4877 else
4879 mode = GET_MODE (INSN_VAR_LOCATION (insn));
4881 gcc_assert (mode == GET_MODE (val)
4882 || (GET_MODE (val) == VOIDmode
4883 && (CONST_SCALAR_INT_P (val)
4884 || GET_CODE (val) == CONST_FIXED
4885 || GET_CODE (val) == LABEL_REF)));
4888 INSN_VAR_LOCATION_LOC (insn) = val;
4889 prev_insn = PREV_INSN (insn);
4890 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
4891 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
4894 flag_strict_aliasing = save_strict_alias;
4897 /* Expand basic block BB from GIMPLE trees to RTL. */
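/* Returns BB, or, when expanding a GIMPLE_COND or a tail call splits the
   block, the newly created basic block at which expansion resumes.  */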
4899 static basic_block
4900 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
4902 gimple_stmt_iterator gsi;
4903 gimple_seq stmts;
4904 gimple stmt = NULL;
4905 rtx_note *note;
4906 rtx_insn *last;
4907 edge e;
4908 edge_iterator ei;
4910 if (dump_file)
4911 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
4912 bb->index);
4914 /* Note that since we are now transitioning from GIMPLE to RTL, we
4915 cannot use the gsi_*_bb() routines because they expect the basic
4916 block to be in GIMPLE, instead of RTL. Therefore, we need to
4917 access the BB sequence directly. */
4918 stmts = bb_seq (bb);
4919 bb->il.gimple.seq = NULL;
4920 bb->il.gimple.phi_nodes = NULL;
4921 rtl_profile_for_bb (bb);
4922 init_rtl_bb_info (bb);
4923 bb->flags |= BB_RTL;
4925 /* Remove the RETURN_EXPR if we may fall through to the exit
4926 instead. */
4927 gsi = gsi_last (stmts);
4928 if (!gsi_end_p (gsi)
4929 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
4931 gimple ret_stmt = gsi_stmt (gsi);
4933 gcc_assert (single_succ_p (bb));
4934 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
4936 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
4937 && !gimple_return_retval (ret_stmt))
4939 gsi_remove (&gsi, false);
4940 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
4944 gsi = gsi_start (stmts);
4945 if (!gsi_end_p (gsi))
4947 stmt = gsi_stmt (gsi);
4948 if (gimple_code (stmt) != GIMPLE_LABEL)
4949 stmt = NULL;
4952 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
4954 if (stmt || elt)
4956 last = get_last_insn ();
4958 if (stmt)
4960 expand_gimple_stmt (stmt);
4961 gsi_next (&gsi);
4964 if (elt)
4965 emit_label (*elt);
4967 /* Java emits line number notes in the top of labels.
4968 ??? Make this go away once line number notes are obsoleted. */
4969 BB_HEAD (bb) = NEXT_INSN (last);
4970 if (NOTE_P (BB_HEAD (bb)))
4971 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
4972 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
4974 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4976 else
4977 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
4979 NOTE_BASIC_BLOCK (note) = bb;
4981 for (; !gsi_end_p (gsi); gsi_next (&gsi))
4983 basic_block new_bb;
4985 stmt = gsi_stmt (gsi);
4987 /* If this statement is a non-debug one, and we generate debug
4988 insns, then this one might be the last real use of a TERed
4989 SSA_NAME, but where there are still some debug uses further
4990 down. Expanding the current SSA name in such further debug
4991 uses by their RHS might lead to wrong debug info, as coalescing
4992 might make the operands of such RHS be placed into the same
4993 pseudo as something else. Like so:
4994 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
4995 use(a_1);
4996 a_2 = ...
4997 #DEBUG ... => a_1
4998 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
4999 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
5000 the write to a_2 would actually have clobbered the place which
5001 formerly held a_0.
5003 So, instead of that, we recognize the situation, and generate
5004 debug temporaries at the last real use of TERed SSA names:
5005 a_1 = a_0 + 1;
5006 #DEBUG #D1 => a_1
5007 use(a_1);
5008 a_2 = ...
5009 #DEBUG ... => #D1
5011 if (MAY_HAVE_DEBUG_INSNS
5012 && SA.values
5013 && !is_gimple_debug (stmt))
5015 ssa_op_iter iter;
5016 tree op;
5017 gimple def;
5019 location_t sloc = curr_insn_location ();
5021 /* Look for SSA names that have their last use here (TERed
5022 names always have only one real use). */
5023 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5024 if ((def = get_gimple_for_ssa_name (op)))
5026 imm_use_iterator imm_iter;
5027 use_operand_p use_p;
5028 bool have_debug_uses = false;
5030 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5032 if (gimple_debug_bind_p (USE_STMT (use_p)))
5034 have_debug_uses = true;
5035 break;
5039 if (have_debug_uses)
5041 /* OP is a TERed SSA name, with DEF its defining
5042 statement, and where OP is used in further debug
5043 instructions. Generate a debug temporary, and
5044 replace all uses of OP in debug insns with that
5045 temporary. */
5046 gimple debugstmt;
5047 tree value = gimple_assign_rhs_to_tree (def);
5048 tree vexpr = make_node (DEBUG_EXPR_DECL);
5049 rtx val;
5050 machine_mode mode;
5052 set_curr_insn_location (gimple_location (def));
5054 DECL_ARTIFICIAL (vexpr) = 1;
5055 TREE_TYPE (vexpr) = TREE_TYPE (value);
5056 if (DECL_P (value))
5057 mode = DECL_MODE (value);
5058 else
5059 mode = TYPE_MODE (TREE_TYPE (value));
5060 DECL_MODE (vexpr) = mode;
5062 val = gen_rtx_VAR_LOCATION
5063 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5065 emit_debug_insn (val);
5067 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5069 if (!gimple_debug_bind_p (debugstmt))
5070 continue;
5072 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5073 SET_USE (use_p, vexpr);
5075 update_stmt (debugstmt);
5079 set_curr_insn_location (sloc);
5082 currently_expanding_gimple_stmt = stmt;
5084 /* Expand this statement, then evaluate the resulting RTL and
5085 fixup the CFG accordingly. */
5086 if (gimple_code (stmt) == GIMPLE_COND)
5088 new_bb = expand_gimple_cond (bb, stmt);
5089 if (new_bb)
5090 return new_bb;
5092 else if (gimple_debug_bind_p (stmt))
5094 location_t sloc = curr_insn_location ();
5095 gimple_stmt_iterator nsi = gsi;
5097 for (;;)
5099 tree var = gimple_debug_bind_get_var (stmt);
5100 tree value;
5101 rtx val;
5102 machine_mode mode;
5104 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5105 && TREE_CODE (var) != LABEL_DECL
5106 && !target_for_debug_bind (var))
5107 goto delink_debug_stmt;
5109 if (gimple_debug_bind_has_value_p (stmt))
5110 value = gimple_debug_bind_get_value (stmt);
5111 else
5112 value = NULL_TREE;
5114 last = get_last_insn ();
5116 set_curr_insn_location (gimple_location (stmt));
5118 if (DECL_P (var))
5119 mode = DECL_MODE (var);
5120 else
5121 mode = TYPE_MODE (TREE_TYPE (var));
5123 val = gen_rtx_VAR_LOCATION
5124 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5126 emit_debug_insn (val);
5128 if (dump_file && (dump_flags & TDF_DETAILS))
5130 /* We can't dump the insn with a TREE where an RTX
5131 is expected. */
5132 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5133 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5134 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5137 delink_debug_stmt:
5138 /* In order not to generate too many debug temporaries,
5139 we delink all uses of debug statements we already expanded.
5140 Therefore debug statements between definition and real
5141 use of TERed SSA names will continue to use the SSA name,
5142 and not be replaced with debug temps. */
5143 delink_stmt_imm_use (stmt);
5145 gsi = nsi;
5146 gsi_next (&nsi);
5147 if (gsi_end_p (nsi))
5148 break;
5149 stmt = gsi_stmt (nsi);
5150 if (!gimple_debug_bind_p (stmt))
5151 break;
5154 set_curr_insn_location (sloc);
5156 else if (gimple_debug_source_bind_p (stmt))
5158 location_t sloc = curr_insn_location ();
5159 tree var = gimple_debug_source_bind_get_var (stmt);
5160 tree value = gimple_debug_source_bind_get_value (stmt);
5161 rtx val;
5162 machine_mode mode;
5164 last = get_last_insn ();
5166 set_curr_insn_location (gimple_location (stmt));
5168 mode = DECL_MODE (var);
5170 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5171 VAR_INIT_STATUS_UNINITIALIZED);
5173 emit_debug_insn (val);
5175 if (dump_file && (dump_flags & TDF_DETAILS))
5177 /* We can't dump the insn with a TREE where an RTX
5178 is expected. */
5179 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5180 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5181 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5184 set_curr_insn_location (sloc);
5186 else
5188 if (is_gimple_call (stmt)
5189 && gimple_call_tail_p (stmt)
5190 && disable_tail_calls)
5191 gimple_call_set_tail (stmt, false);
5193 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
5195 bool can_fallthru;
5196 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
5197 if (new_bb)
5199 if (can_fallthru)
5200 bb = new_bb;
5201 else
5202 return new_bb;
5205 else
5207 def_operand_p def_p;
5208 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5210 if (def_p != NULL)
5212 /* Ignore this stmt if it is in the list of
5213 replaceable expressions. */
5214 if (SA.values
5215 && bitmap_bit_p (SA.values,
5216 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5217 continue;
5219 last = expand_gimple_stmt (stmt);
5220 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5225 currently_expanding_gimple_stmt = NULL;
5227 /* Expand implicit goto and convert goto_locus. */
5228 FOR_EACH_EDGE (e, ei, bb->succs)
5230 if (e->goto_locus != UNKNOWN_LOCATION)
5231 set_curr_insn_location (e->goto_locus);
5232 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5234 emit_jump (label_rtx_for_bb (e->dest));
5235 e->flags &= ~EDGE_FALLTHRU;
5239 /* Expanded RTL can create a jump in the last instruction of the block.
5240 This might later be assumed to be a jump to the successor and break edge insertion.
5241 We need to insert a dummy move to prevent this. PR41440. */
5242 if (single_succ_p (bb)
5243 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5244 && (last = get_last_insn ())
5245 && JUMP_P (last))
5247 rtx dummy = gen_reg_rtx (SImode);
5248 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5251 do_pending_stack_adjust ();
5253 /* Find the block tail. The last insn in the block is the insn
5254 before a barrier and/or table jump insn. */
5255 last = get_last_insn ();
5256 if (BARRIER_P (last))
5257 last = PREV_INSN (last);
5258 if (JUMP_TABLE_DATA_P (last))
5259 last = PREV_INSN (PREV_INSN (last));
5260 BB_END (bb) = last;
5262 update_bb_for_insn (bb);
5264 return bb;
5268 /* Create a basic block for initialization code. */
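/* The init block collects the RTL already emitted for function entry
   (parameter setup, the __main call, the stack protector prologue, ...),
   sits right after the entry block, and has the entry edge redirected to
   it.  */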
5270 static basic_block
5271 construct_init_block (void)
5273 basic_block init_block, first_block;
5274 edge e = NULL;
5275 int flags;
5277 /* Multiple entry points not supported yet. */
5278 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5279 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5280 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5281 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5282 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5284 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5286 /* When the entry edge points to the first basic block, we don't need a jump;
5287 otherwise we have to jump to the proper target. */
5288 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5290 tree label = gimple_block_label (e->dest);
5292 emit_jump (label_rtx (label));
5293 flags = 0;
5295 else
5296 flags = EDGE_FALLTHRU;
5298 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5299 get_last_insn (),
5300 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5301 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5302 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5303 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5304 if (e)
5306 first_block = e->dest;
5307 redirect_edge_succ (e, init_block);
5308 e = make_edge (init_block, first_block, flags);
5310 else
5311 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5312 e->probability = REG_BR_PROB_BASE;
5313 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5315 update_bb_for_insn (init_block);
5316 return init_block;
5319 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5320 found in the block tree. */
5322 static void
5323 set_block_levels (tree block, int level)
5325 while (block)
5327 BLOCK_NUMBER (block) = level;
5328 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5329 block = BLOCK_CHAIN (block);
5333 /* Create a block containing landing pads and similar stuff. */
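/* The exit block wraps the RTL emitted by expand_function_end, starting at
   return_label; all non-abnormal predecessors of the exit block are
   redirected to it.  */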
5335 static void
5336 construct_exit_block (void)
5338 rtx_insn *head = get_last_insn ();
5339 rtx_insn *end;
5340 basic_block exit_block;
5341 edge e, e2;
5342 unsigned ix;
5343 edge_iterator ei;
5344 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5345 rtx_insn *orig_end = BB_END (prev_bb);
5347 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5349 /* Make sure the locus is set to the end of the function, so that
5350 epilogue line numbers and warnings are set properly. */
5351 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5352 input_location = cfun->function_end_locus;
5354 /* Generate rtl for function exit. */
5355 expand_function_end ();
5357 end = get_last_insn ();
5358 if (head == end)
5359 return;
5360 /* While emitting the function end we could have moved the end of the last basic
5361 block. */
5362 BB_END (prev_bb) = orig_end;
5363 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5364 head = NEXT_INSN (head);
5365 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5366 bb frequency counting will be confused. Any instructions before that
5367 label are emitted for the case where PREV_BB falls through into the
5368 exit block, so append those instructions to prev_bb in that case. */
5369 if (NEXT_INSN (head) != return_label)
5371 while (NEXT_INSN (head) != return_label)
5373 if (!NOTE_P (NEXT_INSN (head)))
5374 BB_END (prev_bb) = NEXT_INSN (head);
5375 head = NEXT_INSN (head);
5378 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5379 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5380 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5381 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5383 ix = 0;
5384 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5386 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5387 if (!(e->flags & EDGE_ABNORMAL))
5388 redirect_edge_succ (e, exit_block);
5389 else
5390 ix++;
5393 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5394 e->probability = REG_BR_PROB_BASE;
5395 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5396 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5397 if (e2 != e)
5399 e->count -= e2->count;
5400 exit_block->count -= e2->count;
5401 exit_block->frequency -= EDGE_FREQUENCY (e2);
5403 if (e->count < 0)
5404 e->count = 0;
5405 if (exit_block->count < 0)
5406 exit_block->count = 0;
5407 if (exit_block->frequency < 0)
5408 exit_block->frequency = 0;
5409 update_bb_for_insn (exit_block);
5412 /* Helper function for discover_nonconstant_array_refs.
5413 Look for ARRAY_REF nodes with non-constant indexes and mark them
5414 addressable. */
5416 static tree
5417 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5418 void *data ATTRIBUTE_UNUSED)
5420 tree t = *tp;
5422 if (IS_TYPE_OR_DECL_P (t))
5423 *walk_subtrees = 0;
5424 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5426 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5427 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5428 && (!TREE_OPERAND (t, 2)
5429 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5430 || (TREE_CODE (t) == COMPONENT_REF
5431 && (!TREE_OPERAND (t,2)
5432 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5433 || TREE_CODE (t) == BIT_FIELD_REF
5434 || TREE_CODE (t) == REALPART_EXPR
5435 || TREE_CODE (t) == IMAGPART_EXPR
5436 || TREE_CODE (t) == VIEW_CONVERT_EXPR
5437 || CONVERT_EXPR_P (t))
5438 t = TREE_OPERAND (t, 0);
5440 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5442 t = get_base_address (t);
5443 if (t && DECL_P (t)
5444 && DECL_MODE (t) != BLKmode)
5445 TREE_ADDRESSABLE (t) = 1;
5448 *walk_subtrees = 0;
5451 return NULL_TREE;
5454 /* RTL expansion is not able to compile array references with variable
5455 offsets for arrays stored in a single register. Discover such
5456 expressions and mark variables as addressable to avoid this
5457 scenario. */
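/* For example, an access like v[i] with a non-constant i, where v is a
   small array that would otherwise be assigned a non-BLKmode (e.g. vector)
   register: marking v addressable forces it into memory so the variable
   index can be expanded.  */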
5459 static void
5460 discover_nonconstant_array_refs (void)
5462 basic_block bb;
5463 gimple_stmt_iterator gsi;
5465 FOR_EACH_BB_FN (bb, cfun)
5466 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5468 gimple stmt = gsi_stmt (gsi);
5469 if (!is_gimple_debug (stmt))
5470 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
5474 /* This function sets crtl->args.internal_arg_pointer to a virtual
5475 register if DRAP is needed. Local register allocator will replace
5476 virtual_incoming_args_rtx with the virtual register. */
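/* DRAP (the dynamic realign argument pointer) is the register some targets
   use to address incoming arguments when the stack has to be realigned on
   entry; see targetm.calls.get_drap_rtx below.  */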
5478 static void
5479 expand_stack_alignment (void)
5481 rtx drap_rtx;
5482 unsigned int preferred_stack_boundary;
5484 if (! SUPPORTS_STACK_ALIGNMENT)
5485 return;
5487 if (cfun->calls_alloca
5488 || cfun->has_nonlocal_label
5489 || crtl->has_nonlocal_goto)
5490 crtl->need_drap = true;
5492 /* Call update_stack_boundary here again to update incoming stack
5493 boundary. It may set incoming stack alignment to a different
5494 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5495 use the minimum incoming stack alignment to check if it is OK
5496 to perform sibcall optimization since sibcall optimization will
5497 only align the outgoing stack to incoming stack boundary. */
5498 if (targetm.calls.update_stack_boundary)
5499 targetm.calls.update_stack_boundary ();
5501 /* The incoming stack frame has to be aligned at least at
5502 parm_stack_boundary. */
5503 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5505 /* Update crtl->stack_alignment_estimated and use it later to align
5506 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5507 exceptions since callgraph doesn't collect incoming stack alignment
5508 in this case. */
5509 if (cfun->can_throw_non_call_exceptions
5510 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
5511 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
5512 else
5513 preferred_stack_boundary = crtl->preferred_stack_boundary;
5514 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
5515 crtl->stack_alignment_estimated = preferred_stack_boundary;
5516 if (preferred_stack_boundary > crtl->stack_alignment_needed)
5517 crtl->stack_alignment_needed = preferred_stack_boundary;
5519 gcc_assert (crtl->stack_alignment_needed
5520 <= crtl->stack_alignment_estimated);
5522 crtl->stack_realign_needed
5523 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
5524 crtl->stack_realign_tried = crtl->stack_realign_needed;
5526 crtl->stack_realign_processed = true;
5528 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
5529 alignment. */
5530 gcc_assert (targetm.calls.get_drap_rtx != NULL);
5531 drap_rtx = targetm.calls.get_drap_rtx ();
5533 /* stack_realign_drap and drap_rtx must match. */
5534 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
5536 /* Do nothing if NULL is returned, which means DRAP is not needed. */
5537 if (NULL != drap_rtx)
5539 crtl->args.internal_arg_pointer = drap_rtx;
5541 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
5542 needed. */
5543 fixup_tail_calls ();
5548 static void
5549 expand_main_function (void)
5551 #if (defined(INVOKE__main) \
5552 || (!defined(HAS_INIT_SECTION) \
5553 && !defined(INIT_SECTION_ASM_OP) \
5554 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
5555 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
5556 #endif
5560 /* Expand code to initialize the stack_protect_guard. This is invoked at
5561 the beginning of a function to be protected. */
5563 #ifndef HAVE_stack_protect_set
5564 # define HAVE_stack_protect_set 0
5565 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
5566 #endif
5568 static void
5569 stack_protect_prologue (void)
5571 tree guard_decl = targetm.stack_protect_guard ();
5572 rtx x, y;
5574 x = expand_normal (crtl->stack_protect_guard);
5575 y = expand_normal (guard_decl);
5577 /* Allow the target to copy from Y to X without leaking Y into a
5578 register. */
5579 if (HAVE_stack_protect_set)
5581 rtx insn = gen_stack_protect_set (x, y);
5582 if (insn)
5584 emit_insn (insn);
5585 return;
5589 /* Otherwise do a straight move. */
5590 emit_move_insn (x, y);
5593 /* Translate the intermediate representation contained in the CFG
5594 from GIMPLE trees to RTL.
5596 We do conversion per basic block and preserve/update the tree CFG.
5597 This implies we have to do some magic as the CFG can simultaneously
5598 consist of basic blocks containing RTL and GIMPLE trees. This can
5599 confuse the CFG hooks, so be careful to not manipulate CFG during
5600 the expansion. */
5602 namespace {
5604 const pass_data pass_data_expand =
5606 RTL_PASS, /* type */
5607 "expand", /* name */
5608 OPTGROUP_NONE, /* optinfo_flags */
5609 TV_EXPAND, /* tv_id */
5610 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
5611 | PROP_gimple_lcx
5612 | PROP_gimple_lvec ), /* properties_required */
5613 PROP_rtl, /* properties_provided */
5614 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
5615 0, /* todo_flags_start */
5616 0, /* todo_flags_finish */
5619 class pass_expand : public rtl_opt_pass
5621 public:
5622 pass_expand (gcc::context *ctxt)
5623 : rtl_opt_pass (pass_data_expand, ctxt)
5626 /* opt_pass methods: */
5627 virtual unsigned int execute (function *);
5629 }; // class pass_expand
5631 unsigned int
5632 pass_expand::execute (function *fun)
5634 basic_block bb, init_block;
5635 sbitmap blocks;
5636 edge_iterator ei;
5637 edge e;
5638 rtx_insn *var_seq, *var_ret_seq;
5639 unsigned i;
5641 timevar_push (TV_OUT_OF_SSA);
5642 rewrite_out_of_ssa (&SA);
5643 timevar_pop (TV_OUT_OF_SSA);
5644 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
5646 /* Make sure all values used by the optimization passes have sane
5647 defaults. */
5648 reg_renumber = 0;
5650 /* Some backends want to know that we are expanding to RTL. */
5651 currently_expanding_to_rtl = 1;
5652 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
5653 free_dominance_info (CDI_DOMINATORS);
5655 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
5657 insn_locations_init ();
5658 if (!DECL_IS_BUILTIN (current_function_decl))
5660 /* Eventually, all FEs should explicitly set function_start_locus. */
5661 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
5662 set_curr_insn_location
5663 (DECL_SOURCE_LOCATION (current_function_decl));
5664 else
5665 set_curr_insn_location (fun->function_start_locus);
5667 else
5668 set_curr_insn_location (UNKNOWN_LOCATION);
5669 prologue_location = curr_insn_location ();
5671 #ifdef INSN_SCHEDULING
5672 init_sched_attrs ();
5673 #endif
5675 /* Make sure first insn is a note even if we don't want linenums.
5676 This makes sure the first insn will never be deleted.
5677 Also, final expects a note to appear there. */
5678 emit_note (NOTE_INSN_DELETED);
5680 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
5681 discover_nonconstant_array_refs ();
5683 targetm.expand_to_rtl_hook ();
5684 crtl->stack_alignment_needed = STACK_BOUNDARY;
5685 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
5686 crtl->stack_alignment_estimated = 0;
5687 crtl->preferred_stack_boundary = STACK_BOUNDARY;
5688 fun->cfg->max_jumptable_ents = 0;
5690 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
5691 of the function section at expansion time to predict the distance of calls. */
5692 resolve_unique_section (current_function_decl, 0, flag_function_sections);
5694 /* Expand the variables recorded during gimple lowering. */
5695 timevar_push (TV_VAR_EXPAND);
5696 start_sequence ();
5698 var_ret_seq = expand_used_vars ();
5700 var_seq = get_insns ();
5701 end_sequence ();
5702 timevar_pop (TV_VAR_EXPAND);
5704 /* Honor stack protection warnings. */
5705 if (warn_stack_protect)
5707 if (fun->calls_alloca)
5708 warning (OPT_Wstack_protector,
5709 "stack protector not protecting local variables: "
5710 "variable length buffer");
5711 if (has_short_buffer && !crtl->stack_protect_guard)
5712 warning (OPT_Wstack_protector,
5713 "stack protector not protecting function: "
5714 "all local arrays are less than %d bytes long",
5715 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
5718 /* Set up parameters and prepare for return, for the function. */
5719 expand_function_start (current_function_decl);
5721 /* If we emitted any instructions for setting up the variables,
5722 emit them before the FUNCTION_START note. */
5723 if (var_seq)
5725 emit_insn_before (var_seq, parm_birth_insn);
5727 /* In expand_function_end we'll insert the alloca save/restore
5728 before parm_birth_insn. We've just inserted an alloca call.
5729 Adjust the pointer to match. */
5730 parm_birth_insn = var_seq;
5733 /* Now that we also have the parameter RTXs, copy them over to our
5734 partitions. */
5735 for (i = 0; i < SA.map->num_partitions; i++)
5737 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
5739 if (TREE_CODE (var) != VAR_DECL
5740 && !SA.partition_to_pseudo[i])
5741 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
5742 gcc_assert (SA.partition_to_pseudo[i]);
5744 /* If this decl was marked as living in multiple places, reset
5745 this now to NULL. */
5746 if (DECL_RTL_IF_SET (var) == pc_rtx)
5747 SET_DECL_RTL (var, NULL);
5749 /* Some RTL parts really want to look at DECL_RTL(x) when x
5750 was a decl marked in REG_ATTR or MEM_ATTR. We could use
5751 SET_DECL_RTL here to make this available, but that would mean
5752 selecting one of the potentially many RTLs for one DECL. Instead
5753 of doing that we simply reset the MEM_EXPR of the RTL in question,
5754 then nobody can get at it and hence nobody can call DECL_RTL on it. */
5755 if (!DECL_RTL_SET_P (var))
5757 if (MEM_P (SA.partition_to_pseudo[i]))
5758 set_mem_expr (SA.partition_to_pseudo[i], NULL);
5762 /* If we have a class containing differently aligned pointers
5763 we need to merge those into the corresponding RTL pointer
5764 alignment. */
5765 for (i = 1; i < num_ssa_names; i++)
5767 tree name = ssa_name (i);
5768 int part;
5769 rtx r;
5771 if (!name
5772 /* We might have generated new SSA names in
5773 update_alias_info_with_stack_vars. They will have NULL
5774 defining statements, and won't be part of the partitioning,
5775 so ignore those. */
5776 || !SSA_NAME_DEF_STMT (name))
5777 continue;
5778 part = var_to_partition (SA.map, name);
5779 if (part == NO_PARTITION)
5780 continue;
5782 /* Adjust all partition members to get the underlying decl of
5783 the representative which we might have created in expand_one_var. */
5784 if (SSA_NAME_VAR (name) == NULL_TREE)
5786 tree leader = partition_to_var (SA.map, part);
5787 gcc_assert (SSA_NAME_VAR (leader) != NULL_TREE);
5788 replace_ssa_name_symbol (name, SSA_NAME_VAR (leader));
5790 if (!POINTER_TYPE_P (TREE_TYPE (name)))
5791 continue;
5793 r = SA.partition_to_pseudo[part];
5794 if (REG_P (r))
5795 mark_reg_pointer (r, get_pointer_alignment (name));
5798 /* If this function is `main', emit a call to `__main'
5799 to run global initializers, etc. */
5800 if (DECL_NAME (current_function_decl)
5801 && MAIN_NAME_P (DECL_NAME (current_function_decl))
5802 && DECL_FILE_SCOPE_P (current_function_decl))
5803 expand_main_function ();
5805 /* Initialize the stack_protect_guard field. This must happen after the
5806 call to __main (if any) so that the external decl is initialized. */
5807 if (crtl->stack_protect_guard)
5808 stack_protect_prologue ();
  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
    e->flags &= ~EDGE_EXECUTABLE;
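  /* lab_rtx_for_bb maps each GIMPLE basic block to the code label created
     for it, so that jumps to blocks that have not been expanded yet can
     already be emitted.  */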
  lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
		  next_bb)
    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();
  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations ();

  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  fun->gimple_df->in_ssa_p = false;
  loops_state_clear (LOOP_CLOSED_SSA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  delete lab_rtx_for_bb;
  free_histograms ();
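  /* Generate the function exit code and wrap it in an RTL basic block
     connected to the exit block.  */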
  construct_exit_block ();
  insn_locations_finalize ();
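  /* If expanding the variables produced a sequence that has to run when
     the function returns (var_ret_seq), place it right after the return
     label, skipping a basic block note that directly follows it.  */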
  if (var_ret_seq)
    {
      rtx_insn *after = return_label;
      rtx_insn *next = NEXT_INSN (after);
      if (next && NOTE_INSN_BASIC_BLOCK_P (next))
	after = next;
      emit_insn_after (var_ret_seq, after);
    }

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (fun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     to split edges, which the edge insertions below might do.  */
  rebuild_jump_labels (get_insns ());
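  /* Commit the insn sequences that expansion queued on CFG edges.  Insns
     queued on the lone successor edge of the entry block are emitted next
     to parm_birth_insn instead of onto a newly split edge.  */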
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->insns.r)
	    {
	      rebuild_jump_labels_chain (e->insns.r);
	      /* Put insns after parm birth, but before
		 NOTE_INSN_FUNCTION_BEG.  */
	      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
		  && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
		{
		  rtx_insn *insns = e->insns.r;
		  e->insns.r = NULL;
		  if (NOTE_P (parm_birth_insn)
		      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
		    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
		  else
		    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
		}
	      else
		commit_one_edge_insertion (e);
	    }
	  else
	    ei_next (&ei);
	}
    }
  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }
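  /* Expansion may have emitted jumps and labels in the middle of what used
     to be a single block; rescan all blocks to split them into proper
     sub-basic-blocks and then purge edges that have become dead.  */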
  blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();
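  /* Finalize the stack alignment requirements collected during expansion
     and, where the target requires it, prepare for dynamic stack
     realignment.  */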
  expand_stack_alignment ();

  /* Fix up REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ???  We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }
  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);
  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    {
      htab_delete (fun->gimple_df->tm_restart);
      fun->gimple_df->tm_restart = NULL;
    }
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
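  /* Expansion may have switched to per-block RTL profiles (e.g. for the
     exit block); go back to the default profile now that we are done.  */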
  default_rtl_profile ();

  timevar_pop (TV_POST_EXPAND);

  return 0;
}

} // anon namespace

rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}