[34/77] Add a SCALAR_INT_TYPE_MODE macro
[official-gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "ssa.h"
32 #include "expmed.h"
33 #include "optabs.h"
34 #include "regs.h"
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "attribs.h"
43 #include "varasm.h"
44 #include "except.h"
45 #include "insn-attr.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "stmt.h"
50 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
51 #include "expr.h"
52 #include "optabs-tree.h"
53 #include "libfuncs.h"
54 #include "reload.h"
55 #include "langhooks.h"
56 #include "common/common-target.h"
57 #include "tree-ssa-live.h"
58 #include "tree-outof-ssa.h"
59 #include "tree-ssa-address.h"
60 #include "builtins.h"
61 #include "tree-chkp.h"
62 #include "rtl-chkp.h"
63 #include "ccmp.h"
66 /* If this is nonzero, we do not bother generating VOLATILE
67 around volatile memory references, and we are willing to
68 output indirect addresses. If cse is to follow, we reject
69 indirect addresses so a useful potential cse is generated;
70 if it is used only once, instruction combination will produce
71 the same indirect address eventually. */
72 int cse_not_expected;
74 static bool block_move_libcall_safe_for_call_parm (void);
75 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
76 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
77 unsigned HOST_WIDE_INT);
78 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
79 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
80 static rtx_insn *compress_float_constant (rtx, rtx);
81 static rtx get_subtarget (rtx);
82 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
83 HOST_WIDE_INT, unsigned HOST_WIDE_INT,
84 unsigned HOST_WIDE_INT, machine_mode,
85 tree, int, alias_set_type, bool);
86 static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool);
87 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
88 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
89 machine_mode, tree, alias_set_type, bool, bool);
91 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
93 static int is_aligning_offset (const_tree, const_tree);
94 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
95 static rtx do_store_flag (sepops, rtx, machine_mode);
96 #ifdef PUSH_ROUNDING
97 static void emit_single_push_insn (machine_mode, rtx, tree);
98 #endif
99 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
100 profile_probability);
101 static rtx const_vector_from_tree (tree);
102 static rtx const_scalar_mask_from_tree (tree);
103 static tree tree_expr_size (const_tree);
104 static HOST_WIDE_INT int_expr_size (tree);
107 /* This is run to set up which modes can be used
108 directly in memory and to initialize the block move optab. It is run
109 at the beginning of compilation and when the target is reinitialized. */
111 void
112 init_expr_target (void)
114 rtx pat;
115 int num_clobbers;
116 rtx mem, mem1;
117 rtx reg;
119 /* Try indexing by frame ptr and try by stack ptr.
120 It is known that on the Convex the stack ptr isn't a valid index.
121 With luck, one or the other is valid on any machine. */
122 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
123 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
125 /* A scratch register we can modify in-place below to avoid
126 useless RTL allocations. */
127 reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
129 rtx_insn *insn = as_a<rtx_insn *> (rtx_alloc (INSN));
130 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
131 PATTERN (insn) = pat;
133 for (machine_mode mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
134 mode = (machine_mode) ((int) mode + 1))
136 int regno;
138 direct_load[(int) mode] = direct_store[(int) mode] = 0;
139 PUT_MODE (mem, mode);
140 PUT_MODE (mem1, mode);
142 /* See if there is some register that can be used in this mode and
143 directly loaded or stored from memory. */
145 if (mode != VOIDmode && mode != BLKmode)
146 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
147 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
148 regno++)
150 if (! HARD_REGNO_MODE_OK (regno, mode))
151 continue;
153 set_mode_and_regno (reg, mode, regno);
155 SET_SRC (pat) = mem;
156 SET_DEST (pat) = reg;
157 if (recog (pat, insn, &num_clobbers) >= 0)
158 direct_load[(int) mode] = 1;
160 SET_SRC (pat) = mem1;
161 SET_DEST (pat) = reg;
162 if (recog (pat, insn, &num_clobbers) >= 0)
163 direct_load[(int) mode] = 1;
165 SET_SRC (pat) = reg;
166 SET_DEST (pat) = mem;
167 if (recog (pat, insn, &num_clobbers) >= 0)
168 direct_store[(int) mode] = 1;
170 SET_SRC (pat) = reg;
171 SET_DEST (pat) = mem1;
172 if (recog (pat, insn, &num_clobbers) >= 0)
173 direct_store[(int) mode] = 1;
177 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
179 opt_scalar_float_mode mode_iter;
180 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_FLOAT)
182 scalar_float_mode mode = mode_iter.require ();
183 scalar_float_mode srcmode;
184 FOR_EACH_MODE_UNTIL (srcmode, mode)
186 enum insn_code ic;
188 ic = can_extend_p (mode, srcmode, 0);
189 if (ic == CODE_FOR_nothing)
190 continue;
192 PUT_MODE (mem, srcmode);
194 if (insn_operand_matches (ic, 1, mem))
195 float_extend_from_mem[mode][srcmode] = true;
200 /* This is run at the start of compiling a function. */
202 void
203 init_expr (void)
205 memset (&crtl->expr, 0, sizeof (crtl->expr));
208 /* Copy data from FROM to TO, where the machine modes are not the same.
209 Both modes may be integer, or both may be floating, or both may be
210 fixed-point.
211 UNSIGNEDP should be nonzero if FROM is an unsigned type.
212 This causes zero-extension instead of sign-extension. */
214 void
215 convert_move (rtx to, rtx from, int unsignedp)
217 machine_mode to_mode = GET_MODE (to);
218 machine_mode from_mode = GET_MODE (from);
219 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
220 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
221 enum insn_code code;
222 rtx libcall;
224 /* rtx code for making an equivalent value. */
225 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
226 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
229 gcc_assert (to_real == from_real);
230 gcc_assert (to_mode != BLKmode);
231 gcc_assert (from_mode != BLKmode);
233 /* If the source and destination are already the same, then there's
234 nothing to do. */
235 if (to == from)
236 return;
238 /* If FROM is a SUBREG that indicates that we have already done at least
239 the required extension, strip it. We don't handle such SUBREGs as
240 TO here. */
242 scalar_int_mode to_int_mode;
243 if (GET_CODE (from) == SUBREG
244 && SUBREG_PROMOTED_VAR_P (from)
245 && is_a <scalar_int_mode> (to_mode, &to_int_mode)
246 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
247 >= GET_MODE_PRECISION (to_int_mode))
248 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
249 from = gen_lowpart (to_int_mode, from), from_mode = to_int_mode;
251 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
253 if (to_mode == from_mode
254 || (from_mode == VOIDmode && CONSTANT_P (from)))
256 emit_move_insn (to, from);
257 return;
260 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
262 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
264 if (VECTOR_MODE_P (to_mode))
265 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
266 else
267 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
269 emit_move_insn (to, from);
270 return;
273 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
275 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
276 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
277 return;
280 if (to_real)
282 rtx value;
283 rtx_insn *insns;
284 convert_optab tab;
286 gcc_assert ((GET_MODE_PRECISION (from_mode)
287 != GET_MODE_PRECISION (to_mode))
288 || (DECIMAL_FLOAT_MODE_P (from_mode)
289 != DECIMAL_FLOAT_MODE_P (to_mode)));
291 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
292 /* Conversion between decimal float and binary float, same size. */
293 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
294 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
295 tab = sext_optab;
296 else
297 tab = trunc_optab;
299 /* Try converting directly if the insn is supported. */
301 code = convert_optab_handler (tab, to_mode, from_mode);
302 if (code != CODE_FOR_nothing)
304 emit_unop_insn (code, to, from,
305 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
306 return;
309 /* Otherwise use a libcall. */
310 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
312 /* Is this conversion implemented yet? */
313 gcc_assert (libcall);
315 start_sequence ();
316 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
317 1, from, from_mode);
318 insns = get_insns ();
319 end_sequence ();
320 emit_libcall_block (insns, to, value,
321 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
322 from)
323 : gen_rtx_FLOAT_EXTEND (to_mode, from));
324 return;
327 /* Handle pointer conversion. */ /* SPEE 900220. */
328 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
330 convert_optab ctab;
332 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
333 ctab = trunc_optab;
334 else if (unsignedp)
335 ctab = zext_optab;
336 else
337 ctab = sext_optab;
339 if (convert_optab_handler (ctab, to_mode, from_mode)
340 != CODE_FOR_nothing)
342 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
343 to, from, UNKNOWN);
344 return;
348 /* Targets are expected to provide conversion insns between PxImode and
349 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
350 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
352 scalar_int_mode full_mode
353 = smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));
355 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
356 != CODE_FOR_nothing);
358 if (full_mode != from_mode)
359 from = convert_to_mode (full_mode, from, unsignedp);
360 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
361 to, from, UNKNOWN);
362 return;
364 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
366 rtx new_from;
367 scalar_int_mode full_mode
368 = smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
369 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
370 enum insn_code icode;
372 icode = convert_optab_handler (ctab, full_mode, from_mode);
373 gcc_assert (icode != CODE_FOR_nothing);
375 if (to_mode == full_mode)
377 emit_unop_insn (icode, to, from, UNKNOWN);
378 return;
381 new_from = gen_reg_rtx (full_mode);
382 emit_unop_insn (icode, new_from, from, UNKNOWN);
384 /* else proceed to integer conversions below. */
385 from_mode = full_mode;
386 from = new_from;
389 /* Make sure both are fixed-point modes or both are not. */
390 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
391 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
392 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
394 /* If we widen from_mode to to_mode and they are in the same class,
395 we won't saturate the result.
396 Otherwise, always saturate the result to play safe. */
397 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
398 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
399 expand_fixed_convert (to, from, 0, 0);
400 else
401 expand_fixed_convert (to, from, 0, 1);
402 return;
405 /* Now both modes are integers. */
407 /* Handle expanding beyond a word. */
408 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
409 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
411 rtx_insn *insns;
412 rtx lowpart;
413 rtx fill_value;
414 rtx lowfrom;
415 int i;
416 machine_mode lowpart_mode;
417 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
419 /* Try converting directly if the insn is supported. */
420 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
421 != CODE_FOR_nothing)
423 /* If FROM is a SUBREG, put it into a register. Do this
424 so that we always generate the same set of insns for
425 better cse'ing; if an intermediate assignment occurred,
426 we won't be doing the operation directly on the SUBREG. */
427 if (optimize > 0 && GET_CODE (from) == SUBREG)
428 from = force_reg (from_mode, from);
429 emit_unop_insn (code, to, from, equiv_code);
430 return;
432 /* Next, try converting via full word. */
433 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
434 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
435 != CODE_FOR_nothing))
437 rtx word_to = gen_reg_rtx (word_mode);
438 if (REG_P (to))
440 if (reg_overlap_mentioned_p (to, from))
441 from = force_reg (from_mode, from);
442 emit_clobber (to);
444 convert_move (word_to, from, unsignedp);
445 emit_unop_insn (code, to, word_to, equiv_code);
446 return;
449 /* No special multiword conversion insn; do it by hand. */
450 start_sequence ();
452 /* Since we will turn this into a no conflict block, we must ensure
453 the source does not overlap the target so force it into an isolated
454 register when maybe so. Likewise for any MEM input, since the
455 conversion sequence might require several references to it and we
456 must ensure we're getting the same value every time. */
458 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
459 from = force_reg (from_mode, from);
461 /* Get a copy of FROM widened to a word, if necessary. */
462 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
463 lowpart_mode = word_mode;
464 else
465 lowpart_mode = from_mode;
467 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
469 lowpart = gen_lowpart (lowpart_mode, to);
470 emit_move_insn (lowpart, lowfrom);
472 /* Compute the value to put in each remaining word. */
473 if (unsignedp)
474 fill_value = const0_rtx;
475 else
476 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
477 LT, lowfrom, const0_rtx,
478 lowpart_mode, 0, -1);
480 /* Fill the remaining words. */
481 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
483 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
484 rtx subword = operand_subword (to, index, 1, to_mode);
486 gcc_assert (subword);
488 if (fill_value != subword)
489 emit_move_insn (subword, fill_value);
492 insns = get_insns ();
493 end_sequence ();
495 emit_insn (insns);
496 return;
499 /* Truncating multi-word to a word or less. */
500 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
501 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
503 if (!((MEM_P (from)
504 && ! MEM_VOLATILE_P (from)
505 && direct_load[(int) to_mode]
506 && ! mode_dependent_address_p (XEXP (from, 0),
507 MEM_ADDR_SPACE (from)))
508 || REG_P (from)
509 || GET_CODE (from) == SUBREG))
510 from = force_reg (from_mode, from);
511 convert_move (to, gen_lowpart (word_mode, from), 0);
512 return;
515 /* Now follow all the conversions between integers
516 no more than a word long. */
518 /* For truncation, usually we can just refer to FROM in a narrower mode. */
519 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
520 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
522 if (!((MEM_P (from)
523 && ! MEM_VOLATILE_P (from)
524 && direct_load[(int) to_mode]
525 && ! mode_dependent_address_p (XEXP (from, 0),
526 MEM_ADDR_SPACE (from)))
527 || REG_P (from)
528 || GET_CODE (from) == SUBREG))
529 from = force_reg (from_mode, from);
530 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
531 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
532 from = copy_to_reg (from);
533 emit_move_insn (to, gen_lowpart (to_mode, from));
534 return;
537 /* Handle extension. */
538 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
540 /* Convert directly if that works. */
541 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
542 != CODE_FOR_nothing)
544 emit_unop_insn (code, to, from, equiv_code);
545 return;
547 else
549 machine_mode intermediate;
550 rtx tmp;
551 int shift_amount;
553 /* Search for a mode to convert via. */
554 FOR_EACH_MODE_FROM (intermediate, from_mode)
555 if (((can_extend_p (to_mode, intermediate, unsignedp)
556 != CODE_FOR_nothing)
557 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
558 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
559 && (can_extend_p (intermediate, from_mode, unsignedp)
560 != CODE_FOR_nothing))
562 convert_move (to, convert_to_mode (intermediate, from,
563 unsignedp), unsignedp);
564 return;
567 /* No suitable intermediate mode.
568 Generate what we need with shifts. */
569 shift_amount = (GET_MODE_PRECISION (to_mode)
570 - GET_MODE_PRECISION (from_mode));
571 from = gen_lowpart (to_mode, force_reg (from_mode, from));
572 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
573 to, unsignedp);
574 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
575 to, unsignedp);
576 if (tmp != to)
577 emit_move_insn (to, tmp);
578 return;
582 /* Support special truncate insns for certain modes. */
583 if (convert_optab_handler (trunc_optab, to_mode,
584 from_mode) != CODE_FOR_nothing)
586 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
587 to, from, UNKNOWN);
588 return;
591 /* Handle truncation of volatile memrefs, and so on;
592 the things that couldn't be truncated directly,
593 and for which there was no special instruction.
595 ??? Code above formerly short-circuited this, for most integer
596 mode pairs, with a force_reg in from_mode followed by a recursive
597 call to this routine. Appears always to have been wrong. */
598 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
600 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
601 emit_move_insn (to, temp);
602 return;
605 /* Mode combination is not recognized. */
606 gcc_unreachable ();
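/* Editorial note: the following is an illustrative sketch, not part of
   the original source.  A typical caller that zero-extends a 32-bit
   value into a 64-bit pseudo might look like this (assuming SImode and
   DImode both exist on the target):

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 1);

   convert_move then picks a direct extension insn, a word-by-word
   expansion or a libcall, as the checks above dictate.  */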
609 /* Return an rtx for a value that would result
610 from converting X to mode MODE.
611 Both X and MODE may be floating, or both integer.
612 UNSIGNEDP is nonzero if X is an unsigned value.
613 This can be done by referring to a part of X in place
614 or by copying to a new temporary with conversion. */
617 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
619 return convert_modes (mode, VOIDmode, x, unsignedp);
622 /* Return an rtx for a value that would result
623 from converting X from mode OLDMODE to mode MODE.
624 Both modes may be floating, or both integer.
625 UNSIGNEDP is nonzero if X is an unsigned value.
627 This can be done by referring to a part of X in place
628 or by copying to a new temporary with conversion.
630 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
633 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
635 rtx temp;
636 scalar_int_mode int_mode;
638 /* If FROM is a SUBREG that indicates that we have already done at least
639 the required extension, strip it. */
641 if (GET_CODE (x) == SUBREG
642 && SUBREG_PROMOTED_VAR_P (x)
643 && is_a <scalar_int_mode> (mode, &int_mode)
644 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (int_mode)
645 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
646 x = gen_lowpart (int_mode, SUBREG_REG (x));
648 if (GET_MODE (x) != VOIDmode)
649 oldmode = GET_MODE (x);
651 if (mode == oldmode)
652 return x;
654 if (CONST_SCALAR_INT_P (x)
655 && is_int_mode (mode, &int_mode))
657 /* If the caller did not tell us the old mode, then there is not
658 much to do with respect to canonicalization. We have to
659 assume that all the bits are significant. */
660 if (GET_MODE_CLASS (oldmode) != MODE_INT)
661 oldmode = MAX_MODE_INT;
662 wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
663 GET_MODE_PRECISION (int_mode),
664 unsignedp ? UNSIGNED : SIGNED);
665 return immed_wide_int_const (w, int_mode);
668 /* We can do this with a gen_lowpart if both desired and current modes
669 are integer, and this is either a constant integer, a register, or a
670 non-volatile MEM. */
671 scalar_int_mode int_oldmode;
672 if (is_int_mode (mode, &int_mode)
673 && is_int_mode (oldmode, &int_oldmode)
674 && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode)
675 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode])
676 || (REG_P (x)
677 && (!HARD_REGISTER_P (x)
678 || HARD_REGNO_MODE_OK (REGNO (x), int_mode))
679 && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x)))))
680 return gen_lowpart (int_mode, x);
682 /* Converting from an integer constant into MODE is always equivalent to a
683 subreg operation. */
684 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
686 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
687 return simplify_gen_subreg (mode, x, oldmode, 0);
690 temp = gen_reg_rtx (mode);
691 convert_move (temp, x, unsignedp);
692 return temp;
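/* Editorial note: illustrative sketch, not part of the original source.
   convert_modes is the value-returning counterpart of convert_move; for
   example, widening a QImode constant to SImode (modes assumed to exist
   on the target):

     rtx narrow = gen_int_mode (42, QImode);
     rtx wide = convert_modes (SImode, QImode, narrow, 1);

   For CONST_SCALAR_INT_P values this folds immediately via
   immed_wide_int_const; otherwise a fresh pseudo is created and
   convert_move emits the conversion code.  */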
695 /* Return the largest alignment we can use for doing a move (or store)
696 of MAX_PIECES. ALIGN is the largest alignment we could use. */
698 static unsigned int
699 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
701 machine_mode tmode;
703 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
704 if (align >= GET_MODE_ALIGNMENT (tmode))
705 align = GET_MODE_ALIGNMENT (tmode);
706 else
708 machine_mode tmode, xmode;
710 xmode = NARROWEST_INT_MODE;
711 FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
713 if (GET_MODE_SIZE (tmode) > max_pieces
714 || SLOW_UNALIGNED_ACCESS (tmode, align))
715 break;
716 xmode = tmode;
719 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
722 return align;
725 /* Return the widest integer mode no wider than SIZE. If no such mode
726 can be found, return VOIDmode. */
728 static machine_mode
729 widest_int_mode_for_size (unsigned int size)
731 machine_mode tmode, mode = VOIDmode;
733 FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
734 if (GET_MODE_SIZE (tmode) < size)
735 mode = tmode;
737 return mode;
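/* Editorial note (observation about the loop above, not original text):
   the comparison is strict, so widest_int_mode_for_size (8) returns the
   widest integer mode of at most 7 bytes.  Callers that want to allow
   N-byte accesses therefore pass N + 1, as the by-pieces code below
   does with MOVE_MAX_PIECES + 1 and STORE_MAX_PIECES + 1.  */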
740 /* Determine whether an operation OP on LEN bytes with alignment ALIGN can
741 and should be performed piecewise. */
743 static bool
744 can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
745 enum by_pieces_operation op)
747 return targetm.use_by_pieces_infrastructure_p (len, align, op,
748 optimize_insn_for_speed_p ());
751 /* Determine whether the LEN bytes can be moved by using several move
752 instructions. Return nonzero if a call to move_by_pieces should
753 succeed. */
755 bool
756 can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
758 return can_do_by_pieces (len, align, MOVE_BY_PIECES);
761 /* Return number of insns required to perform operation OP by pieces
762 for L bytes. ALIGN (in bits) is maximum alignment we can assume. */
764 unsigned HOST_WIDE_INT
765 by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
766 unsigned int max_size, by_pieces_operation op)
768 unsigned HOST_WIDE_INT n_insns = 0;
770 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
772 while (max_size > 1 && l > 0)
774 machine_mode mode;
775 enum insn_code icode;
777 mode = widest_int_mode_for_size (max_size);
779 if (mode == VOIDmode)
780 break;
781 unsigned int modesize = GET_MODE_SIZE (mode);
783 icode = optab_handler (mov_optab, mode);
784 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
786 unsigned HOST_WIDE_INT n_pieces = l / modesize;
787 l %= modesize;
788 switch (op)
790 default:
791 n_insns += n_pieces;
792 break;
794 case COMPARE_BY_PIECES:
795 int batch = targetm.compare_by_pieces_branch_ratio (mode);
796 int batch_ops = 4 * batch - 1;
797 unsigned HOST_WIDE_INT full = n_pieces / batch;
798 n_insns += full * batch_ops;
799 if (n_pieces % batch != 0)
800 n_insns++;
801 break;
805 max_size = modesize;
808 gcc_assert (!l);
809 return n_insns;
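/* Editorial note: a worked example, illustrative only.  On a typical
   64-bit target with MOVE_MAX_PIECES == 8 and fully aligned operands,
   a 15-byte move decomposes as 8 + 4 + 2 + 1 bytes, so

     by_pieces_ninsns (15, 64, MOVE_MAX_PIECES + 1, MOVE_BY_PIECES)

   returns 4.  The exact count depends on the target's mov patterns, on
   SLOW_UNALIGNED_ACCESS and, for COMPARE_BY_PIECES, on
   compare_by_pieces_branch_ratio.  */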
812 /* Used when performing piecewise block operations, holds information
813 about one of the memory objects involved. The member functions
814 can be used to generate code for loading from the object and
815 updating the address when iterating. */
817 class pieces_addr
819 /* The object being referenced, a MEM. Can be NULL_RTX to indicate
820 stack pushes. */
821 rtx m_obj;
822 /* The address of the object. Can differ from that seen in the
823 MEM rtx if we copied the address to a register. */
824 rtx m_addr;
825 /* Nonzero if the address of the object already has an autoincrement;
826 the sign indicates whether that was an increment or a decrement.
827 signed char m_addr_inc;
828 /* Nonzero if we intend to use autoinc without the address already
829 having autoinc form. We will insert add insns around each memory
830 reference, expecting later passes to form autoinc addressing modes.
831 The only supported options are predecrement and postincrement. */
832 signed char m_explicit_inc;
833 /* True if we have either of the two possible cases of using
834 autoincrement. */
835 bool m_auto;
836 /* True if this is an address to be used for load operations rather
837 than stores. */
838 bool m_is_load;
840 /* Optionally, a function to obtain constants for any given offset into
841 the objects, and data associated with it. */
842 by_pieces_constfn m_constfn;
843 void *m_cfndata;
844 public:
845 pieces_addr (rtx, bool, by_pieces_constfn, void *);
846 rtx adjust (machine_mode, HOST_WIDE_INT);
847 void increment_address (HOST_WIDE_INT);
848 void maybe_predec (HOST_WIDE_INT);
849 void maybe_postinc (HOST_WIDE_INT);
850 void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
851 int get_addr_inc ()
853 return m_addr_inc;
857 /* Initialize a pieces_addr structure from an object OBJ. IS_LOAD is
858 true if the operation to be performed on this object is a load
859 rather than a store. For stores, OBJ can be NULL, in which case we
860 assume the operation is a stack push. For loads, the optional
861 CONSTFN and its associated CFNDATA can be used in place of the
862 memory load. */
864 pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
865 void *cfndata)
866 : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
868 m_addr_inc = 0;
869 m_auto = false;
870 if (obj)
872 rtx addr = XEXP (obj, 0);
873 rtx_code code = GET_CODE (addr);
874 m_addr = addr;
875 bool dec = code == PRE_DEC || code == POST_DEC;
876 bool inc = code == PRE_INC || code == POST_INC;
877 m_auto = inc || dec;
878 if (m_auto)
879 m_addr_inc = dec ? -1 : 1;
881 /* While we have always looked for these codes here, the code
882 implementing the memory operation has never handled them.
883 Support could be added later if necessary or beneficial. */
884 gcc_assert (code != PRE_INC && code != POST_DEC);
886 else
888 m_addr = NULL_RTX;
889 if (!is_load)
891 m_auto = true;
892 if (STACK_GROWS_DOWNWARD)
893 m_addr_inc = -1;
894 else
895 m_addr_inc = 1;
897 else
898 gcc_assert (constfn != NULL);
900 m_explicit_inc = 0;
901 if (constfn)
902 gcc_assert (is_load);
905 /* Decide whether to use autoinc for an address involved in a memory op.
906 MODE is the mode of the accesses, REVERSE is true if we've decided to
907 perform the operation starting from the end, and LEN is the length of
908 the operation. Don't override an earlier decision to set m_auto. */
910 void
911 pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
912 HOST_WIDE_INT len)
914 if (m_auto || m_obj == NULL_RTX)
915 return;
917 bool use_predec = (m_is_load
918 ? USE_LOAD_PRE_DECREMENT (mode)
919 : USE_STORE_PRE_DECREMENT (mode));
920 bool use_postinc = (m_is_load
921 ? USE_LOAD_POST_INCREMENT (mode)
922 : USE_STORE_POST_INCREMENT (mode));
923 machine_mode addr_mode = get_address_mode (m_obj);
925 if (use_predec && reverse)
927 m_addr = copy_to_mode_reg (addr_mode,
928 plus_constant (addr_mode,
929 m_addr, len));
930 m_auto = true;
931 m_explicit_inc = -1;
933 else if (use_postinc && !reverse)
935 m_addr = copy_to_mode_reg (addr_mode, m_addr);
936 m_auto = true;
937 m_explicit_inc = 1;
939 else if (CONSTANT_P (m_addr))
940 m_addr = copy_to_mode_reg (addr_mode, m_addr);
943 /* Adjust the address to refer to the data at OFFSET in MODE. If we
944 are using autoincrement for this address, we don't add the offset,
945 but we still modify the MEM's properties. */
948 pieces_addr::adjust (machine_mode mode, HOST_WIDE_INT offset)
950 if (m_constfn)
951 return m_constfn (m_cfndata, offset, mode);
952 if (m_obj == NULL_RTX)
953 return NULL_RTX;
954 if (m_auto)
955 return adjust_automodify_address (m_obj, mode, m_addr, offset);
956 else
957 return adjust_address (m_obj, mode, offset);
960 /* Emit an add instruction to increment the address by SIZE. */
962 void
963 pieces_addr::increment_address (HOST_WIDE_INT size)
965 rtx amount = gen_int_mode (size, GET_MODE (m_addr));
966 emit_insn (gen_add2_insn (m_addr, amount));
969 /* If we are supposed to decrement the address before each access, emit code
970 to do so now. Increment by SIZE (which should have the correct sign
971 already).
973 void
974 pieces_addr::maybe_predec (HOST_WIDE_INT size)
976 if (m_explicit_inc >= 0)
977 return;
978 gcc_assert (HAVE_PRE_DECREMENT);
979 increment_address (size);
982 /* If we are supposed to increment the address after each access, emit code
983 to do so now. Increment by SIZE. */
985 void
986 pieces_addr::maybe_postinc (HOST_WIDE_INT size)
988 if (m_explicit_inc <= 0)
989 return;
990 gcc_assert (HAVE_POST_INCREMENT);
991 increment_address (size);
994 /* This structure is used by do_op_by_pieces to describe the operation
995 to be performed. */
997 class op_by_pieces_d
999 protected:
1000 pieces_addr m_to, m_from;
1001 unsigned HOST_WIDE_INT m_len;
1002 HOST_WIDE_INT m_offset;
1003 unsigned int m_align;
1004 unsigned int m_max_size;
1005 bool m_reverse;
1007 /* Virtual functions, overridden by derived classes for the specific
1008 operation. */
1009 virtual void generate (rtx, rtx, machine_mode) = 0;
1010 virtual bool prepare_mode (machine_mode, unsigned int) = 0;
1011 virtual void finish_mode (machine_mode)
1015 public:
1016 op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
1017 unsigned HOST_WIDE_INT, unsigned int);
1018 void run ();
1021 /* The constructor for an op_by_pieces_d structure. We require two
1022 objects named TO and FROM, which are identified as loads or stores
1023 by TO_LOAD and FROM_LOAD. If FROM is a load, the optional FROM_CFN
1024 and its associated FROM_CFN_DATA can be used to replace loads with
1025 constant values. LEN describes the length of the operation. */
1027 op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
1028 rtx from, bool from_load,
1029 by_pieces_constfn from_cfn,
1030 void *from_cfn_data,
1031 unsigned HOST_WIDE_INT len,
1032 unsigned int align)
1033 : m_to (to, to_load, NULL, NULL),
1034 m_from (from, from_load, from_cfn, from_cfn_data),
1035 m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
1037 int toi = m_to.get_addr_inc ();
1038 int fromi = m_from.get_addr_inc ();
1039 if (toi >= 0 && fromi >= 0)
1040 m_reverse = false;
1041 else if (toi <= 0 && fromi <= 0)
1042 m_reverse = true;
1043 else
1044 gcc_unreachable ();
1046 m_offset = m_reverse ? len : 0;
1047 align = MIN (to ? MEM_ALIGN (to) : align,
1048 from ? MEM_ALIGN (from) : align);
1050 /* If copying requires more than two move insns,
1051 copy addresses to registers (to make displacements shorter)
1052 and use post-increment if available. */
1053 if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
1055 /* Find the widest mode that might be used for the accesses. */
1056 machine_mode mode = widest_int_mode_for_size (m_max_size);
1058 m_from.decide_autoinc (mode, m_reverse, len);
1059 m_to.decide_autoinc (mode, m_reverse, len);
1062 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1063 m_align = align;
1066 /* This function contains the main loop used for expanding a block
1067 operation. First move what we can in the largest integer mode,
1068 then go to successively smaller modes. For every access, call
1069 the virtual function generate with the two operands and the mode.
1071 void
1072 op_by_pieces_d::run ()
1074 while (m_max_size > 1 && m_len > 0)
1076 machine_mode mode = widest_int_mode_for_size (m_max_size);
1078 if (mode == VOIDmode)
1079 break;
1081 if (prepare_mode (mode, m_align))
1083 unsigned int size = GET_MODE_SIZE (mode);
1084 rtx to1 = NULL_RTX, from1;
1086 while (m_len >= size)
1088 if (m_reverse)
1089 m_offset -= size;
1091 to1 = m_to.adjust (mode, m_offset);
1092 from1 = m_from.adjust (mode, m_offset);
1094 m_to.maybe_predec (-(HOST_WIDE_INT)size);
1095 m_from.maybe_predec (-(HOST_WIDE_INT)size);
1097 generate (to1, from1, mode);
1099 m_to.maybe_postinc (size);
1100 m_from.maybe_postinc (size);
1102 if (!m_reverse)
1103 m_offset += size;
1105 m_len -= size;
1108 finish_mode (mode);
1111 m_max_size = GET_MODE_SIZE (mode);
1114 /* The code above should have handled everything. */
1115 gcc_assert (!m_len);
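/* Editorial note: a hypothetical sketch, not part of the original
   source, showing how a derived class plugs into the loop above by
   overriding the virtual hooks:

     class example_by_pieces_d : public op_by_pieces_d
     {
     public:
       example_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                            unsigned int align)
         : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align) {}
       bool prepare_mode (machine_mode mode, unsigned int align)
         { return (optab_handler (mov_optab, mode) != CODE_FOR_nothing
                   && align >= GET_MODE_ALIGNMENT (mode)); }
       void generate (rtx to, rtx from, machine_mode)
         { emit_insn (gen_move_insn (to, from)); }
     };

   move_by_pieces_d, store_by_pieces_d and compare_by_pieces_d below are
   the real instantiations of this pattern.  */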
1118 /* Derived class from op_by_pieces_d, providing support for block move
1119 operations. */
1121 class move_by_pieces_d : public op_by_pieces_d
1123 insn_gen_fn m_gen_fun;
1124 void generate (rtx, rtx, machine_mode);
1125 bool prepare_mode (machine_mode, unsigned int);
1127 public:
1128 move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1129 unsigned int align)
1130 : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
1133 rtx finish_endp (int);
1136 /* Return true if MODE can be used for a set of copies, given an
1137 alignment ALIGN. Prepare whatever data is necessary for later
1138 calls to generate. */
1140 bool
1141 move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1143 insn_code icode = optab_handler (mov_optab, mode);
1144 m_gen_fun = GEN_FCN (icode);
1145 return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1148 /* A callback used when iterating for a move_by_pieces_operation.
1149 OP0 and OP1 are the destination and source respectively; emit a
1150 move of OP1 into OP0 in MODE using the insn gen function chosen by
1151 prepare_mode. If OP0 is NULL, this means we should generate a push
1152 of OP1 instead.
1154 void
1155 move_by_pieces_d::generate (rtx op0, rtx op1,
1156 machine_mode mode ATTRIBUTE_UNUSED)
1158 #ifdef PUSH_ROUNDING
1159 if (op0 == NULL_RTX)
1161 emit_single_push_insn (mode, op1, NULL);
1162 return;
1164 #endif
1165 emit_insn (m_gen_fun (op0, op1));
1168 /* Perform the final adjustment at the end of a string to obtain the
1169 correct return value for the block operation. If ENDP is 1 return
1170 memory at the end ala mempcpy, and if ENDP is 2 return memory the
1171 end minus one byte ala stpcpy. */
1174 move_by_pieces_d::finish_endp (int endp)
1176 gcc_assert (!m_reverse);
1177 if (endp == 2)
1179 m_to.maybe_postinc (-1);
1180 --m_offset;
1182 return m_to.adjust (QImode, m_offset);
1185 /* Generate several move instructions to copy LEN bytes from block FROM to
1186 block TO. (These are MEM rtx's with BLKmode).
1188 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1189 used to push FROM to the stack.
1191 ALIGN is maximum stack alignment we can assume.
1193 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1194 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1195 stpcpy. */
1198 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1199 unsigned int align, int endp)
1201 #ifndef PUSH_ROUNDING
1202 if (to == NULL)
1203 gcc_unreachable ();
1204 #endif
1206 move_by_pieces_d data (to, from, len, align);
1208 data.run ();
1210 if (endp)
1211 return data.finish_endp (endp);
1212 else
1213 return to;
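/* Editorial note: illustrative sketch, not part of the original file.
   A caller that knows the length at compile time would typically guard
   the by-pieces path, e.g.

     if (can_move_by_pieces (len, align))
       move_by_pieces (to, from, len, align, 0);

   where TO and FROM are BLKmode MEMs, LEN is a byte count and ALIGN is
   in bits; an ENDP of 0 simply returns TO.  emit_block_move_hints
   below performs exactly this kind of dispatch.  */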
1216 /* Derived class from op_by_pieces_d, providing support for block store
1217 operations. */
1219 class store_by_pieces_d : public op_by_pieces_d
1221 insn_gen_fn m_gen_fun;
1222 void generate (rtx, rtx, machine_mode);
1223 bool prepare_mode (machine_mode, unsigned int);
1225 public:
1226 store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
1227 unsigned HOST_WIDE_INT len, unsigned int align)
1228 : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
1231 rtx finish_endp (int);
1234 /* Return true if MODE can be used for a set of stores, given an
1235 alignment ALIGN. Prepare whatever data is necessary for later
1236 calls to generate. */
1238 bool
1239 store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1241 insn_code icode = optab_handler (mov_optab, mode);
1242 m_gen_fun = GEN_FCN (icode);
1243 return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1246 /* A callback used when iterating for a store_by_pieces_operation.
1247 OP0 is the destination and OP1 the constant value (as produced by
1248 the constfn) to be stored; emit a store of OP1 into OP0 in MODE
1249 using the insn gen function chosen by prepare_mode.
1252 void
1253 store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
1255 emit_insn (m_gen_fun (op0, op1));
1258 /* Perform the final adjustment at the end of a string to obtain the
1259 correct return value for the block operation. If ENDP is 1 return
1260 memory at the end ala mempcpy, and if ENDP is 2 return memory the
1261 end minus one byte ala stpcpy. */
1264 store_by_pieces_d::finish_endp (int endp)
1266 gcc_assert (!m_reverse);
1267 if (endp == 2)
1269 m_to.maybe_postinc (-1);
1270 --m_offset;
1272 return m_to.adjust (QImode, m_offset);
1275 /* Determine whether the LEN bytes generated by CONSTFUN can be
1276 stored to memory using several move instructions. CONSTFUNDATA is
1277 a pointer which will be passed as argument in every CONSTFUN call.
1278 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
1279 a memset operation and false if it's a copy of a constant string.
1280 Return nonzero if a call to store_by_pieces should succeed. */
1283 can_store_by_pieces (unsigned HOST_WIDE_INT len,
1284 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
1285 void *constfundata, unsigned int align, bool memsetp)
1287 unsigned HOST_WIDE_INT l;
1288 unsigned int max_size;
1289 HOST_WIDE_INT offset = 0;
1290 machine_mode mode;
1291 enum insn_code icode;
1292 int reverse;
1293 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
1294 rtx cst ATTRIBUTE_UNUSED;
1296 if (len == 0)
1297 return 1;
1299 if (!targetm.use_by_pieces_infrastructure_p (len, align,
1300 memsetp
1301 ? SET_BY_PIECES
1302 : STORE_BY_PIECES,
1303 optimize_insn_for_speed_p ()))
1304 return 0;
1306 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
1308 /* We would first store what we can in the largest integer mode, then go to
1309 successively smaller modes. */
1311 for (reverse = 0;
1312 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
1313 reverse++)
1315 l = len;
1316 max_size = STORE_MAX_PIECES + 1;
1317 while (max_size > 1 && l > 0)
1319 mode = widest_int_mode_for_size (max_size);
1321 if (mode == VOIDmode)
1322 break;
1324 icode = optab_handler (mov_optab, mode);
1325 if (icode != CODE_FOR_nothing
1326 && align >= GET_MODE_ALIGNMENT (mode))
1328 unsigned int size = GET_MODE_SIZE (mode);
1330 while (l >= size)
1332 if (reverse)
1333 offset -= size;
1335 cst = (*constfun) (constfundata, offset, mode);
1336 if (!targetm.legitimate_constant_p (mode, cst))
1337 return 0;
1339 if (!reverse)
1340 offset += size;
1342 l -= size;
1346 max_size = GET_MODE_SIZE (mode);
1349 /* The code above should have handled everything. */
1350 gcc_assert (!l);
1353 return 1;
1356 /* Generate several move instructions to store LEN bytes generated by
1357 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
1358 pointer which will be passed as argument in every CONSTFUN call.
1359 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
1360 a memset operation and false if it's a copy of a constant string.
1361 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1362 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1363 stpcpy. */
1366 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
1367 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
1368 void *constfundata, unsigned int align, bool memsetp, int endp)
1370 if (len == 0)
1372 gcc_assert (endp != 2);
1373 return to;
1376 gcc_assert (targetm.use_by_pieces_infrastructure_p
1377 (len, align,
1378 memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
1379 optimize_insn_for_speed_p ()));
1381 store_by_pieces_d data (to, constfun, constfundata, len, align);
1382 data.run ();
1384 if (endp)
1385 return data.finish_endp (endp);
1386 else
1387 return to;
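/* Editorial note: an illustrative, hypothetical sketch (not part of the
   original source) of a constfn for store_by_pieces.  It replicates a
   single byte across the requested MODE, assuming integer modes no
   wider than a HOST_WIDE_INT:

     static rtx
     example_byte_cst (void *data, HOST_WIDE_INT, machine_mode mode)
     {
       unsigned HOST_WIDE_INT byte = *(unsigned char *) data;
       return gen_int_mode (byte * (unsigned HOST_WIDE_INT) 0x0101010101010101ULL,
                            mode);
     }

   can_store_by_pieces and store_by_pieces would then be passed
   example_byte_cst and a pointer to the byte as CONSTFUNDATA; the real
   memset expanders in builtins.c work along these lines.  */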
1390 /* Callback routine for clear_by_pieces.
1391 Return const0_rtx unconditionally. */
1393 static rtx
1394 clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode)
1396 return const0_rtx;
1399 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
1400 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
1402 static void
1403 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
1405 if (len == 0)
1406 return;
1408 store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
1409 data.run ();
1412 /* Context used by compare_by_pieces. It stores the fail label
1413 to jump to in case of miscomparison, and for branch ratios greater than 1,
1414 it stores an accumulator and the current and maximum counts before
1415 emitting another branch. */
1417 class compare_by_pieces_d : public op_by_pieces_d
1419 rtx_code_label *m_fail_label;
1420 rtx m_accumulator;
1421 int m_count, m_batch;
1423 void generate (rtx, rtx, machine_mode);
1424 bool prepare_mode (machine_mode, unsigned int);
1425 void finish_mode (machine_mode);
1426 public:
1427 compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
1428 void *op1_cfn_data, HOST_WIDE_INT len, int align,
1429 rtx_code_label *fail_label)
1430 : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
1432 m_fail_label = fail_label;
1436 /* A callback used when iterating for a compare_by_pieces_operation.
1437 OP0 and OP1 are the values that have been loaded and should be
1438 compared in MODE; accumulate the result and branch to m_fail_label
1439 on miscomparison as the branch ratio dictates.
1441 void
1442 compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
1444 if (m_batch > 1)
1446 rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
1447 true, OPTAB_LIB_WIDEN);
1448 if (m_count != 0)
1449 temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
1450 true, OPTAB_LIB_WIDEN);
1451 m_accumulator = temp;
1453 if (++m_count < m_batch)
1454 return;
1456 m_count = 0;
1457 op0 = m_accumulator;
1458 op1 = const0_rtx;
1459 m_accumulator = NULL_RTX;
1461 do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
1462 m_fail_label, profile_probability::uninitialized ());
1465 /* Return true if MODE can be used for a set of moves and comparisons,
1466 given an alignment ALIGN. Prepare whatever data is necessary for
1467 later calls to generate. */
1469 bool
1470 compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1472 insn_code icode = optab_handler (mov_optab, mode);
1473 if (icode == CODE_FOR_nothing
1474 || align < GET_MODE_ALIGNMENT (mode)
1475 || !can_compare_p (EQ, mode, ccp_jump))
1476 return false;
1477 m_batch = targetm.compare_by_pieces_branch_ratio (mode);
1478 if (m_batch < 0)
1479 return false;
1480 m_accumulator = NULL_RTX;
1481 m_count = 0;
1482 return true;
1485 /* Called after expanding a series of comparisons in MODE. If we have
1486 accumulated results for which we haven't emitted a branch yet, do
1487 so now. */
1489 void
1490 compare_by_pieces_d::finish_mode (machine_mode mode)
1492 if (m_accumulator != NULL_RTX)
1493 do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
1494 NULL_RTX, NULL, m_fail_label,
1495 profile_probability::uninitialized ());
1498 /* Generate several move instructions to compare LEN bytes from blocks
1499 ARG0 and ARG1. (These are MEM rtx's with BLKmode).
1504 ALIGN is the maximum common alignment we can assume for both blocks.
1506 Optionally, the caller can pass a constfn and associated data in A1_CFN
1507 and A1_CFN_DATA, describing that the second operand being compared is a
1508 known constant and how to obtain its data. */
1510 static rtx
1511 compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
1512 rtx target, unsigned int align,
1513 by_pieces_constfn a1_cfn, void *a1_cfn_data)
1515 rtx_code_label *fail_label = gen_label_rtx ();
1516 rtx_code_label *end_label = gen_label_rtx ();
1518 if (target == NULL_RTX
1519 || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
1520 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
1522 compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
1523 fail_label);
1525 data.run ();
1527 emit_move_insn (target, const0_rtx);
1528 emit_jump (end_label);
1529 emit_barrier ();
1530 emit_label (fail_label);
1531 emit_move_insn (target, const1_rtx);
1532 emit_label (end_label);
1534 return target;
1537 /* Emit code to move a block Y to a block X. This may be done with
1538 string-move instructions, with multiple scalar move instructions,
1539 or with a library call.
1541 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1542 SIZE is an rtx that says how long they are.
1543 ALIGN is the maximum alignment we can assume they have.
1544 METHOD describes what kind of copy this is, and what mechanisms may be used.
1545 MIN_SIZE is the minimal size of the block to move.
1546 MAX_SIZE is the maximal size of the block to move; if it cannot be
1547 represented in unsigned HOST_WIDE_INT, it is the mask of all ones.
1549 Return the address of the new block, if memcpy is called and returns it,
1550 0 otherwise. */
1553 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1554 unsigned int expected_align, HOST_WIDE_INT expected_size,
1555 unsigned HOST_WIDE_INT min_size,
1556 unsigned HOST_WIDE_INT max_size,
1557 unsigned HOST_WIDE_INT probable_max_size)
1559 bool may_use_call;
1560 rtx retval = 0;
1561 unsigned int align;
1563 gcc_assert (size);
1564 if (CONST_INT_P (size) && INTVAL (size) == 0)
1565 return 0;
1567 switch (method)
1569 case BLOCK_OP_NORMAL:
1570 case BLOCK_OP_TAILCALL:
1571 may_use_call = true;
1572 break;
1574 case BLOCK_OP_CALL_PARM:
1575 may_use_call = block_move_libcall_safe_for_call_parm ();
1577 /* Make inhibit_defer_pop nonzero around the library call
1578 to force it to pop the arguments right away. */
1579 NO_DEFER_POP;
1580 break;
1582 case BLOCK_OP_NO_LIBCALL:
1583 may_use_call = false;
1584 break;
1586 default:
1587 gcc_unreachable ();
1590 gcc_assert (MEM_P (x) && MEM_P (y));
1591 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1592 gcc_assert (align >= BITS_PER_UNIT);
1594 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1595 block copy is more efficient for other large modes, e.g. DCmode. */
1596 x = adjust_address (x, BLKmode, 0);
1597 y = adjust_address (y, BLKmode, 0);
1599 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1600 can be incorrect is coming from __builtin_memcpy. */
1601 if (CONST_INT_P (size))
1603 x = shallow_copy_rtx (x);
1604 y = shallow_copy_rtx (y);
1605 set_mem_size (x, INTVAL (size));
1606 set_mem_size (y, INTVAL (size));
1609 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1610 move_by_pieces (x, y, INTVAL (size), align, 0);
1611 else if (emit_block_move_via_movmem (x, y, size, align,
1612 expected_align, expected_size,
1613 min_size, max_size, probable_max_size))
1615 else if (may_use_call
1616 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1617 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1619 /* Since x and y are passed to a libcall, mark the corresponding
1620 tree EXPR as addressable. */
1621 tree y_expr = MEM_EXPR (y);
1622 tree x_expr = MEM_EXPR (x);
1623 if (y_expr)
1624 mark_addressable (y_expr);
1625 if (x_expr)
1626 mark_addressable (x_expr);
1627 retval = emit_block_copy_via_libcall (x, y, size,
1628 method == BLOCK_OP_TAILCALL);
1631 else
1632 emit_block_move_via_loop (x, y, size, align);
1634 if (method == BLOCK_OP_CALL_PARM)
1635 OK_DEFER_POP;
1637 return retval;
1641 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1643 unsigned HOST_WIDE_INT max, min = 0;
1644 if (GET_CODE (size) == CONST_INT)
1645 min = max = UINTVAL (size);
1646 else
1647 max = GET_MODE_MASK (GET_MODE (size));
1648 return emit_block_move_hints (x, y, size, method, 0, -1,
1649 min, max, max);
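/* Editorial note: illustrative sketch, not part of the original source.
   Most callers reach the block-move logic through the simple wrapper
   above, e.g.

     emit_block_move (dst_mem, src_mem, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   where DST_MEM and SRC_MEM are BLKmode MEMs.  BLOCK_OP_CALL_PARM and
   BLOCK_OP_NO_LIBCALL restrict which of the strategies in
   emit_block_move_hints may be used.  */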
1652 /* A subroutine of emit_block_move. Returns true if calling the
1653 block move libcall will not clobber any parameters which may have
1654 already been placed on the stack. */
1656 static bool
1657 block_move_libcall_safe_for_call_parm (void)
1659 #if defined (REG_PARM_STACK_SPACE)
1660 tree fn;
1661 #endif
1663 /* If arguments are pushed on the stack, then they're safe. */
1664 if (PUSH_ARGS)
1665 return true;
1667 /* If registers go on the stack anyway, any argument is sure to clobber
1668 an outgoing argument. */
1669 #if defined (REG_PARM_STACK_SPACE)
1670 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1671 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1672 depend on its argument. */
1673 (void) fn;
1674 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1675 && REG_PARM_STACK_SPACE (fn) != 0)
1676 return false;
1677 #endif
1679 /* If any argument goes in memory, then it might clobber an outgoing
1680 argument. */
1682 CUMULATIVE_ARGS args_so_far_v;
1683 cumulative_args_t args_so_far;
1684 tree fn, arg;
1686 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1687 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1688 args_so_far = pack_cumulative_args (&args_so_far_v);
1690 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1691 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1693 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1694 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1695 NULL_TREE, true);
1696 if (!tmp || !REG_P (tmp))
1697 return false;
1698 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1699 return false;
1700 targetm.calls.function_arg_advance (args_so_far, mode,
1701 NULL_TREE, true);
1704 return true;
1707 /* A subroutine of emit_block_move. Expand a movmem pattern;
1708 return true if successful. */
1710 static bool
1711 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1712 unsigned int expected_align, HOST_WIDE_INT expected_size,
1713 unsigned HOST_WIDE_INT min_size,
1714 unsigned HOST_WIDE_INT max_size,
1715 unsigned HOST_WIDE_INT probable_max_size)
1717 int save_volatile_ok = volatile_ok;
1718 machine_mode mode;
1720 if (expected_align < align)
1721 expected_align = align;
1722 if (expected_size != -1)
1724 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1725 expected_size = probable_max_size;
1726 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1727 expected_size = min_size;
1730 /* Since this is a move insn, we don't care about volatility. */
1731 volatile_ok = 1;
1733 /* Try the most limited insn first, because there's no point
1734 including more than one in the machine description unless
1735 the more limited one has some advantage. */
1737 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
1739 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1741 if (code != CODE_FOR_nothing
1742 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1743 here because if SIZE is less than the mode mask, as it is
1744 returned by the macro, it will definitely be less than the
1745 actual mode mask. Since SIZE is within the Pmode address
1746 space, we limit MODE to Pmode. */
1747 && ((CONST_INT_P (size)
1748 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1749 <= (GET_MODE_MASK (mode) >> 1)))
1750 || max_size <= (GET_MODE_MASK (mode) >> 1)
1751 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1753 struct expand_operand ops[9];
1754 unsigned int nops;
1756 /* ??? When called via emit_block_move_for_call, it'd be
1757 nice if there were some way to inform the backend, so
1758 that it doesn't fail the expansion because it thinks
1759 emitting the libcall would be more efficient. */
1760 nops = insn_data[(int) code].n_generator_args;
1761 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1763 create_fixed_operand (&ops[0], x);
1764 create_fixed_operand (&ops[1], y);
1765 /* The check above guarantees that this size conversion is valid. */
1766 create_convert_operand_to (&ops[2], size, mode, true);
1767 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1768 if (nops >= 6)
1770 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1771 create_integer_operand (&ops[5], expected_size);
1773 if (nops >= 8)
1775 create_integer_operand (&ops[6], min_size);
1776 /* If we cannot represent the maximal size,
1777 pass NULL for the parameter. */
1778 if ((HOST_WIDE_INT) max_size != -1)
1779 create_integer_operand (&ops[7], max_size);
1780 else
1781 create_fixed_operand (&ops[7], NULL);
1783 if (nops == 9)
1785 /* If we cannot represent the maximal size,
1786 pass NULL for the parameter. */
1787 if ((HOST_WIDE_INT) probable_max_size != -1)
1788 create_integer_operand (&ops[8], probable_max_size);
1789 else
1790 create_fixed_operand (&ops[8], NULL);
1792 if (maybe_expand_insn (code, nops, ops))
1794 volatile_ok = save_volatile_ok;
1795 return true;
1800 volatile_ok = save_volatile_ok;
1801 return false;
1804 /* A subroutine of emit_block_move. Copy the data via an explicit
1805 loop. This is used only when libcalls are forbidden. */
1806 /* ??? It'd be nice to copy in hunks larger than QImode. */
1808 static void
1809 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1810 unsigned int align ATTRIBUTE_UNUSED)
1812 rtx_code_label *cmp_label, *top_label;
1813 rtx iter, x_addr, y_addr, tmp;
1814 machine_mode x_addr_mode = get_address_mode (x);
1815 machine_mode y_addr_mode = get_address_mode (y);
1816 machine_mode iter_mode;
1818 iter_mode = GET_MODE (size);
1819 if (iter_mode == VOIDmode)
1820 iter_mode = word_mode;
1822 top_label = gen_label_rtx ();
1823 cmp_label = gen_label_rtx ();
1824 iter = gen_reg_rtx (iter_mode);
1826 emit_move_insn (iter, const0_rtx);
1828 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1829 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1830 do_pending_stack_adjust ();
1832 emit_jump (cmp_label);
1833 emit_label (top_label);
1835 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1836 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1838 if (x_addr_mode != y_addr_mode)
1839 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1840 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1842 x = change_address (x, QImode, x_addr);
1843 y = change_address (y, QImode, y_addr);
1845 emit_move_insn (x, y);
1847 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1848 true, OPTAB_LIB_WIDEN);
1849 if (tmp != iter)
1850 emit_move_insn (iter, tmp);
1852 emit_label (cmp_label);
1854 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1855 true, top_label,
1856 profile_probability::guessed_always ()
1857 .apply_scale (9, 10));
1860 /* Expand a call to memcpy or memmove or memcmp, and return the result.
1861 TAILCALL is true if this is a tail call. */
1864 emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
1865 rtx size, bool tailcall)
1867 rtx dst_addr, src_addr;
1868 tree call_expr, dst_tree, src_tree, size_tree;
1869 machine_mode size_mode;
1871 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1872 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1873 dst_tree = make_tree (ptr_type_node, dst_addr);
1875 src_addr = copy_addr_to_reg (XEXP (src, 0));
1876 src_addr = convert_memory_address (ptr_mode, src_addr);
1877 src_tree = make_tree (ptr_type_node, src_addr);
1879 size_mode = TYPE_MODE (sizetype);
1880 size = convert_to_mode (size_mode, size, 1);
1881 size = copy_to_mode_reg (size_mode, size);
1882 size_tree = make_tree (sizetype, size);
1884 /* It is incorrect to use the libcall calling conventions for calls to
1885 memcpy/memmove/memcmp because they can be provided by the user. */
1886 tree fn = builtin_decl_implicit (fncode);
1887 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1888 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1890 return expand_call (call_expr, NULL_RTX, false);
1893 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
1894 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
1895 otherwise return null. */
1898 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
1899 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
1900 HOST_WIDE_INT align)
1902 machine_mode insn_mode = insn_data[icode].operand[0].mode;
1904 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
1905 target = NULL_RTX;
1907 struct expand_operand ops[5];
1908 create_output_operand (&ops[0], target, insn_mode);
1909 create_fixed_operand (&ops[1], arg1_rtx);
1910 create_fixed_operand (&ops[2], arg2_rtx);
1911 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
1912 TYPE_UNSIGNED (arg3_type));
1913 create_integer_operand (&ops[4], align);
1914 if (maybe_expand_insn (icode, 5, ops))
1915 return ops[0].value;
1916 return NULL_RTX;
1919 /* Expand a block compare between X and Y with length LEN using the
1920 cmpmem optab, placing the result in TARGET. LEN_TYPE is the type
1921 of the expression that was used to calculate the length. ALIGN
1922 gives the known minimum common alignment. */
1924 static rtx
1925 emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
1926 unsigned align)
1928 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
1929 implementing memcmp because it will stop if it encounters two
1930 zero bytes. */
1931 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
1933 if (icode == CODE_FOR_nothing)
1934 return NULL_RTX;
1936 return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
1939 /* Emit code to compare a block Y to a block X. This may be done with
1940 string-compare instructions, with multiple scalar instructions,
1941 or with a library call.
1943 Both X and Y must be MEM rtx's. LEN is an rtx that says how long
1944 they are. LEN_TYPE is the type of the expression that was used to
1945 calculate it.
1947 If EQUALITY_ONLY is true, it means we don't have to return the tri-state
1948 value of a normal memcmp call, instead we can just compare for equality.
1949 If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
1950 returning NULL_RTX.
1952 Optionally, the caller can pass a constfn and associated data in Y_CFN
1953 and Y_CFN_DATA, describing that the second operand being compared is a
1954 known constant and how to obtain its data.
1955 Return the result of the comparison, or NULL_RTX if we failed to
1956 perform the operation. */
1959 emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
1960 bool equality_only, by_pieces_constfn y_cfn,
1961 void *y_cfndata)
1963 rtx result = 0;
1965 if (CONST_INT_P (len) && INTVAL (len) == 0)
1966 return const0_rtx;
1968 gcc_assert (MEM_P (x) && MEM_P (y));
1969 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1970 gcc_assert (align >= BITS_PER_UNIT);
1972 x = adjust_address (x, BLKmode, 0);
1973 y = adjust_address (y, BLKmode, 0);
1975 if (equality_only
1976 && CONST_INT_P (len)
1977 && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
1978 result = compare_by_pieces (x, y, INTVAL (len), target, align,
1979 y_cfn, y_cfndata);
1980 else
1981 result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);
1983 return result;
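/* Hedged usage sketch (not an actual call site in GCC): expanding an
   equality-only comparison of two 16-byte BLKmode MEMs X and Y, with no
   constfn supplied, so the result only needs to be tested against zero.  */

static rtx
block_equal_16_example (rtx x, rtx y)
{
  return emit_block_cmp_hints (x, y, GEN_INT (16), size_type_node,
			       NULL_RTX, true, NULL, NULL);
}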
1986 /* Copy all or part of a value X into registers starting at REGNO.
1987 The number of registers to be filled is NREGS. */
1989 void
1990 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1992 if (nregs == 0)
1993 return;
1995 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1996 x = validize_mem (force_const_mem (mode, x));
1998 /* See if the machine can do this with a load multiple insn. */
1999 if (targetm.have_load_multiple ())
2001 rtx_insn *last = get_last_insn ();
2002 rtx first = gen_rtx_REG (word_mode, regno);
2003 if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
2004 GEN_INT (nregs)))
2006 emit_insn (pat);
2007 return;
2009 else
2010 delete_insns_since (last);
2013 for (int i = 0; i < nregs; i++)
2014 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2015 operand_subword_force (x, i, mode));
2018 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2019 The number of registers to be filled is NREGS. */
2021 void
2022 move_block_from_reg (int regno, rtx x, int nregs)
2024 if (nregs == 0)
2025 return;
2027 /* See if the machine can do this with a store multiple insn. */
2028 if (targetm.have_store_multiple ())
2030 rtx_insn *last = get_last_insn ();
2031 rtx first = gen_rtx_REG (word_mode, regno);
2032 if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
2033 GEN_INT (nregs)))
2035 emit_insn (pat);
2036 return;
2038 else
2039 delete_insns_since (last);
2042 for (int i = 0; i < nregs; i++)
2044 rtx tem = operand_subword (x, i, 1, BLKmode);
2046 gcc_assert (tem);
2048 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2052 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2053 ORIG, where ORIG is a non-consecutive group of registers represented by
2054 a PARALLEL. The clone is identical to the original except in that the
2055 original set of registers is replaced by a new set of pseudo registers.
2056 The new set has the same modes as the original set. */
2059 gen_group_rtx (rtx orig)
2061 int i, length;
2062 rtx *tmps;
2064 gcc_assert (GET_CODE (orig) == PARALLEL);
2066 length = XVECLEN (orig, 0);
2067 tmps = XALLOCAVEC (rtx, length);
2069 /* Skip a NULL entry in first slot. */
2070 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2072 if (i)
2073 tmps[0] = 0;
2075 for (; i < length; i++)
2077 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2078 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2080 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2083 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
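/* Hedged example of the PARALLEL shape these group routines operate on:
   a 16-byte value whose first eight bytes live in DImode hard register 0
   and whose remaining eight bytes live in DFmode hard register 32 (the
   register numbers and modes are illustrative, not taken from any
   particular target) could be described as below.  Slot 0 may instead
   hold NULL when part of the value is passed on the stack.  */

static rtx
group_rtx_example (void)
{
  return gen_rtx_PARALLEL
    (BLKmode,
     gen_rtvec (2,
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 0),
				   GEN_INT (0)),
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DFmode, 32),
				   GEN_INT (8))));
}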
2086 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
2087 except that values are placed in TMPS[i], and must later be moved
2088 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
2090 static void
2091 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
2093 rtx src;
2094 int start, i;
2095 machine_mode m = GET_MODE (orig_src);
2097 gcc_assert (GET_CODE (dst) == PARALLEL);
2099 if (m != VOIDmode
2100 && !SCALAR_INT_MODE_P (m)
2101 && !MEM_P (orig_src)
2102 && GET_CODE (orig_src) != CONCAT)
2104 scalar_int_mode imode;
2105 if (int_mode_for_mode (GET_MODE (orig_src)).exists (&imode))
2107 src = gen_reg_rtx (imode);
2108 emit_move_insn (gen_lowpart (GET_MODE (orig_src), src), orig_src);
2110 else
2112 src = assign_stack_temp (GET_MODE (orig_src), ssize);
2113 emit_move_insn (src, orig_src);
2115 emit_group_load_1 (tmps, dst, src, type, ssize);
2116 return;
2119 /* Check for a NULL entry, used to indicate that the parameter goes
2120 both on the stack and in registers. */
2121 if (XEXP (XVECEXP (dst, 0, 0), 0))
2122 start = 0;
2123 else
2124 start = 1;
2126 /* Process the pieces. */
2127 for (i = start; i < XVECLEN (dst, 0); i++)
2129 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2130 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2131 unsigned int bytelen = GET_MODE_SIZE (mode);
2132 int shift = 0;
2134 /* Handle trailing fragments that run over the size of the struct. */
2135 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2137 /* Arrange to shift the fragment to where it belongs.
2138 extract_bit_field loads to the lsb of the reg. */
2139 if (
2140 #ifdef BLOCK_REG_PADDING
2141 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2142 == (BYTES_BIG_ENDIAN ? upward : downward)
2143 #else
2144 BYTES_BIG_ENDIAN
2145 #endif
2147 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2148 bytelen = ssize - bytepos;
2149 gcc_assert (bytelen > 0);
2152 /* If we won't be loading directly from memory, protect the real source
2153 from strange tricks we might play; but make sure that the source can
2154 be loaded directly into the destination. */
2155 src = orig_src;
2156 if (!MEM_P (orig_src)
2157 && (!CONSTANT_P (orig_src)
2158 || (GET_MODE (orig_src) != mode
2159 && GET_MODE (orig_src) != VOIDmode)))
2161 if (GET_MODE (orig_src) == VOIDmode)
2162 src = gen_reg_rtx (mode);
2163 else
2164 src = gen_reg_rtx (GET_MODE (orig_src));
2166 emit_move_insn (src, orig_src);
2169 /* Optimize the access just a bit. */
2170 if (MEM_P (src)
2171 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
2172 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2173 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2174 && bytelen == GET_MODE_SIZE (mode))
2176 tmps[i] = gen_reg_rtx (mode);
2177 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2179 else if (COMPLEX_MODE_P (mode)
2180 && GET_MODE (src) == mode
2181 && bytelen == GET_MODE_SIZE (mode))
2182 /* Let emit_move_complex do the bulk of the work. */
2183 tmps[i] = src;
2184 else if (GET_CODE (src) == CONCAT)
2186 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2187 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2188 unsigned int elt = bytepos / slen0;
2189 unsigned int subpos = bytepos % slen0;
2191 if (subpos + bytelen <= slen0)
2193 /* The following assumes that the concatenated objects all
2194 have the same size. In this case, a simple calculation
2195 can be used to determine the object and the bit field
2196 to be extracted. */
2197 tmps[i] = XEXP (src, elt);
2198 if (subpos != 0
2199 || subpos + bytelen != slen0
2200 || (!CONSTANT_P (tmps[i])
2201 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
2202 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2203 subpos * BITS_PER_UNIT,
2204 1, NULL_RTX, mode, mode, false,
2205 NULL);
2207 else
2209 rtx mem;
2211 gcc_assert (!bytepos);
2212 mem = assign_stack_temp (GET_MODE (src), slen);
2213 emit_move_insn (mem, src);
2214 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
2215 0, 1, NULL_RTX, mode, mode, false,
2216 NULL);
2219 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2220 SIMD register, which is currently broken. While we get GCC
2221 to emit proper RTL for these cases, let's dump to memory. */
2222 else if (VECTOR_MODE_P (GET_MODE (dst))
2223 && REG_P (src))
2225 int slen = GET_MODE_SIZE (GET_MODE (src));
2226 rtx mem;
2228 mem = assign_stack_temp (GET_MODE (src), slen);
2229 emit_move_insn (mem, src);
2230 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2232 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
2233 && XVECLEN (dst, 0) > 1)
2234 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
2235 else if (CONSTANT_P (src))
2237 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
2239 if (len == ssize)
2240 tmps[i] = src;
2241 else
2243 rtx first, second;
2245 /* TODO: const_wide_int can have sizes other than this... */
2246 gcc_assert (2 * len == ssize);
2247 split_double (src, &first, &second);
2248 if (i)
2249 tmps[i] = second;
2250 else
2251 tmps[i] = first;
2254 else if (REG_P (src) && GET_MODE (src) == mode)
2255 tmps[i] = src;
2256 else
2257 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2258 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2259 mode, mode, false, NULL);
2261 if (shift)
2262 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
2263 shift, tmps[i], 0);
2267 /* Emit code to move a block SRC of type TYPE to a block DST,
2268 where DST is non-consecutive registers represented by a PARALLEL.
2269 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2270 if not known. */
2272 void
2273 emit_group_load (rtx dst, rtx src, tree type, int ssize)
2275 rtx *tmps;
2276 int i;
2278 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
2279 emit_group_load_1 (tmps, dst, src, type, ssize);
2281 /* Copy the extracted pieces into the proper (probable) hard regs. */
2282 for (i = 0; i < XVECLEN (dst, 0); i++)
2284 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
2285 if (d == NULL)
2286 continue;
2287 emit_move_insn (d, tmps[i]);
2291 /* Similar, but load SRC into new pseudos in a format that looks like
2292 PARALLEL. This can later be fed to emit_group_move to get things
2293 in the right place. */
2296 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
2298 rtvec vec;
2299 int i;
2301 vec = rtvec_alloc (XVECLEN (parallel, 0));
2302 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
2304 /* Convert the vector to look just like the original PARALLEL, except
2305 with the computed values. */
2306 for (i = 0; i < XVECLEN (parallel, 0); i++)
2308 rtx e = XVECEXP (parallel, 0, i);
2309 rtx d = XEXP (e, 0);
2311 if (d)
2313 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
2314 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
2316 RTVEC_ELT (vec, i) = e;
2319 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
2322 /* Emit code to move a block SRC to block DST, where SRC and DST are
2323 non-consecutive groups of registers, each represented by a PARALLEL. */
2325 void
2326 emit_group_move (rtx dst, rtx src)
2328 int i;
2330 gcc_assert (GET_CODE (src) == PARALLEL
2331 && GET_CODE (dst) == PARALLEL
2332 && XVECLEN (src, 0) == XVECLEN (dst, 0));
2334 /* Skip first entry if NULL. */
2335 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2336 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2337 XEXP (XVECEXP (src, 0, i), 0));
2340 /* Move a group of registers represented by a PARALLEL into pseudos. */
2343 emit_group_move_into_temps (rtx src)
2345 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
2346 int i;
2348 for (i = 0; i < XVECLEN (src, 0); i++)
2350 rtx e = XVECEXP (src, 0, i);
2351 rtx d = XEXP (e, 0);
2353 if (d)
2354 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
2355 RTVEC_ELT (vec, i) = e;
2358 return gen_rtx_PARALLEL (GET_MODE (src), vec);
2361 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2362 where SRC is non-consecutive registers represented by a PARALLEL.
2363 SSIZE represents the total size of block ORIG_DST, or -1 if not
2364 known. */
2366 void
2367 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2369 rtx *tmps, dst;
2370 int start, finish, i;
2371 machine_mode m = GET_MODE (orig_dst);
2373 gcc_assert (GET_CODE (src) == PARALLEL);
2375 if (!SCALAR_INT_MODE_P (m)
2376 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
2378 scalar_int_mode imode;
2379 if (int_mode_for_mode (GET_MODE (orig_dst)).exists (&imode))
2381 dst = gen_reg_rtx (imode);
2382 emit_group_store (dst, src, type, ssize);
2383 dst = gen_lowpart (GET_MODE (orig_dst), dst);
2385 else
2387 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
2388 emit_group_store (dst, src, type, ssize);
2390 emit_move_insn (orig_dst, dst);
2391 return;
2394 /* Check for a NULL entry, used to indicate that the parameter goes
2395 both on the stack and in registers. */
2396 if (XEXP (XVECEXP (src, 0, 0), 0))
2397 start = 0;
2398 else
2399 start = 1;
2400 finish = XVECLEN (src, 0);
2402 tmps = XALLOCAVEC (rtx, finish);
2404 /* Copy the (probable) hard regs into pseudos. */
2405 for (i = start; i < finish; i++)
2407 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2408 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
2410 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2411 emit_move_insn (tmps[i], reg);
2413 else
2414 tmps[i] = reg;
2417 /* If we won't be storing directly into memory, protect the real destination
2418 from strange tricks we might play. */
2419 dst = orig_dst;
2420 if (GET_CODE (dst) == PARALLEL)
2422 rtx temp;
2424 /* We can get a PARALLEL dst if there is a conditional expression in
2425 a return statement. In that case, the dst and src are the same,
2426 so no action is necessary. */
2427 if (rtx_equal_p (dst, src))
2428 return;
2430 /* It is unclear if we can ever reach here, but we may as well handle
2431 it. Allocate a temporary, and split this into a store/load to/from
2432 the temporary. */
2433 temp = assign_stack_temp (GET_MODE (dst), ssize);
2434 emit_group_store (temp, src, type, ssize);
2435 emit_group_load (dst, temp, type, ssize);
2436 return;
2438 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2440 machine_mode outer = GET_MODE (dst);
2441 machine_mode inner;
2442 HOST_WIDE_INT bytepos;
2443 bool done = false;
2444 rtx temp;
2446 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
2447 dst = gen_reg_rtx (outer);
2449 /* Make life a bit easier for combine. */
2450 /* If the first element of the vector is the low part
2451 of the destination mode, use a paradoxical subreg to
2452 initialize the destination. */
2453 if (start < finish)
2455 inner = GET_MODE (tmps[start]);
2456 bytepos = subreg_lowpart_offset (inner, outer);
2457 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2459 temp = simplify_gen_subreg (outer, tmps[start],
2460 inner, 0);
2461 if (temp)
2463 emit_move_insn (dst, temp);
2464 done = true;
2465 start++;
2470 /* If the first element wasn't the low part, try the last. */
2471 if (!done
2472 && start < finish - 1)
2474 inner = GET_MODE (tmps[finish - 1]);
2475 bytepos = subreg_lowpart_offset (inner, outer);
2476 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2478 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2479 inner, 0);
2480 if (temp)
2482 emit_move_insn (dst, temp);
2483 done = true;
2484 finish--;
2489 /* Otherwise, simply initialize the result to zero. */
2490 if (!done)
2491 emit_move_insn (dst, CONST0_RTX (outer));
2494 /* Process the pieces. */
2495 for (i = start; i < finish; i++)
2497 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2498 machine_mode mode = GET_MODE (tmps[i]);
2499 unsigned int bytelen = GET_MODE_SIZE (mode);
2500 unsigned int adj_bytelen;
2501 rtx dest = dst;
2503 /* Handle trailing fragments that run over the size of the struct. */
2504 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2505 adj_bytelen = ssize - bytepos;
2506 else
2507 adj_bytelen = bytelen;
2509 if (GET_CODE (dst) == CONCAT)
2511 if (bytepos + adj_bytelen
2512 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2513 dest = XEXP (dst, 0);
2514 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2516 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2517 dest = XEXP (dst, 1);
2519 else
2521 machine_mode dest_mode = GET_MODE (dest);
2522 machine_mode tmp_mode = GET_MODE (tmps[i]);
2524 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2526 if (GET_MODE_ALIGNMENT (dest_mode)
2527 >= GET_MODE_ALIGNMENT (tmp_mode))
2529 dest = assign_stack_temp (dest_mode,
2530 GET_MODE_SIZE (dest_mode));
2531 emit_move_insn (adjust_address (dest,
2532 tmp_mode,
2533 bytepos),
2534 tmps[i]);
2535 dst = dest;
2537 else
2539 dest = assign_stack_temp (tmp_mode,
2540 GET_MODE_SIZE (tmp_mode));
2541 emit_move_insn (dest, tmps[i]);
2542 dst = adjust_address (dest, dest_mode, bytepos);
2544 break;
2548 /* Handle trailing fragments that run over the size of the struct. */
2549 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2551 /* store_bit_field always takes its value from the lsb.
2552 Move the fragment to the lsb if it's not already there. */
2553 if (
2554 #ifdef BLOCK_REG_PADDING
2555 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2556 == (BYTES_BIG_ENDIAN ? upward : downward)
2557 #else
2558 BYTES_BIG_ENDIAN
2559 #endif
2562 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2563 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2564 shift, tmps[i], 0);
2567 /* Make sure not to write past the end of the struct. */
2568 store_bit_field (dest,
2569 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2570 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2571 VOIDmode, tmps[i], false);
2574 /* Optimize the access just a bit. */
2575 else if (MEM_P (dest)
2576 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2577 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2578 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2579 && bytelen == GET_MODE_SIZE (mode))
2580 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2582 else
2583 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2584 0, 0, mode, tmps[i], false);
2587 /* Copy from the pseudo into the (probable) hard reg. */
2588 if (orig_dst != dst)
2589 emit_move_insn (orig_dst, dst);
2592 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2593 of the value stored in X. */
2596 maybe_emit_group_store (rtx x, tree type)
2598 machine_mode mode = TYPE_MODE (type);
2599 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2600 if (GET_CODE (x) == PARALLEL)
2602 rtx result = gen_reg_rtx (mode);
2603 emit_group_store (result, x, type, int_size_in_bytes (type));
2604 return result;
2606 return x;
2609 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2611 This is used on targets that return BLKmode values in registers. */
2613 static void
2614 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2616 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2617 rtx src = NULL, dst = NULL;
2618 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2619 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2620 machine_mode mode = GET_MODE (srcreg);
2621 machine_mode tmode = GET_MODE (target);
2622 machine_mode copy_mode;
2624 /* BLKmode registers created in the back-end shouldn't have survived. */
2625 gcc_assert (mode != BLKmode);
2627 /* If the structure doesn't take up a whole number of words, see whether
2628 SRCREG is padded on the left or on the right. If it's on the left,
2629 set PADDING_CORRECTION to the number of bits to skip.
2631 In most ABIs, the structure will be returned at the least significant end of
2632 the register, which translates to right padding on little-endian
2633 targets and left padding on big-endian targets. The opposite
2634 holds if the structure is returned at the most significant
2635 end of the register. */
2636 if (bytes % UNITS_PER_WORD != 0
2637 && (targetm.calls.return_in_msb (type)
2638 ? !BYTES_BIG_ENDIAN
2639 : BYTES_BIG_ENDIAN))
2640 padding_correction
2641 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2643 /* We can use a single move if we have an exact mode for the size. */
2644 else if (MEM_P (target)
2645 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2646 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2647 && bytes == GET_MODE_SIZE (mode))
2649 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2650 return;
2653 /* And if we additionally have the same mode for a register. */
2654 else if (REG_P (target)
2655 && GET_MODE (target) == mode
2656 && bytes == GET_MODE_SIZE (mode))
2658 emit_move_insn (target, srcreg);
2659 return;
2662 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2663 into a new pseudo which is a full word. */
2664 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2666 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2667 mode = word_mode;
2670 /* Copy the structure BITSIZE bits at a time. If the target lives in
2671 memory, take care of not reading/writing past its end by selecting
2672 a copy mode suited to BITSIZE. This should always be possible given
2673 how it is computed.
2675 If the target lives in register, make sure not to select a copy mode
2676 larger than the mode of the register.
2678 We could probably emit more efficient code for machines which do not use
2679 strict alignment, but it doesn't seem worth the effort at the current
2680 time. */
2682 copy_mode = word_mode;
2683 if (MEM_P (target))
2685 opt_scalar_int_mode mem_mode = int_mode_for_size (bitsize, 1);
2686 if (mem_mode.exists ())
2687 copy_mode = mem_mode.require ();
2689 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2690 copy_mode = tmode;
2692 for (bitpos = 0, xbitpos = padding_correction;
2693 bitpos < bytes * BITS_PER_UNIT;
2694 bitpos += bitsize, xbitpos += bitsize)
2696 /* We need a new source operand each time xbitpos is on a
2697 word boundary and when xbitpos == padding_correction
2698 (the first time through). */
2699 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2700 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2702 /* We need a new destination operand each time bitpos is on
2703 a word boundary. */
2704 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2705 dst = target;
2706 else if (bitpos % BITS_PER_WORD == 0)
2707 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2709 /* Use xbitpos for the source extraction (right justified) and
2710 bitpos for the destination store (left justified). */
2711 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2712 extract_bit_field (src, bitsize,
2713 xbitpos % BITS_PER_WORD, 1,
2714 NULL_RTX, copy_mode, copy_mode,
2715 false, NULL),
2716 false);
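/* Standalone sketch of the padding correction above, with the target
   parameters passed in explicitly (the real code uses BITS_PER_WORD,
   UNITS_PER_WORD, BYTES_BIG_ENDIAN and targetm.calls.return_in_msb) and
   8-bit units assumed.  For example, a 6-byte structure with 4-byte words
   on a big-endian target that returns values at the least significant end
   gives 32 - 2 * 8 = 16 bits of left padding to skip.  */

static unsigned int
padding_correction_model (unsigned int bytes, unsigned int units_per_word,
			  int bytes_big_endian, int return_in_msb)
{
  unsigned int bits_per_word = units_per_word * 8;

  if (bytes % units_per_word != 0
      && (return_in_msb ? !bytes_big_endian : bytes_big_endian))
    return bits_per_word - (bytes % units_per_word) * 8;
  return 0;
}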
2720 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2721 register if it contains any data, otherwise return null.
2723 This is used on targets that return BLKmode values in registers. */
2726 copy_blkmode_to_reg (machine_mode mode, tree src)
2728 int i, n_regs;
2729 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2730 unsigned int bitsize;
2731 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2732 machine_mode dst_mode;
2734 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2736 x = expand_normal (src);
2738 bytes = int_size_in_bytes (TREE_TYPE (src));
2739 if (bytes == 0)
2740 return NULL_RTX;
2742 /* If the structure doesn't take up a whole number of words, see
2743 whether the register value should be padded on the left or on
2744 the right. Set PADDING_CORRECTION to the number of padding
2745 bits needed on the left side.
2747 In most ABIs, the structure will be returned at the least significant end of
2748 the register, which translates to right padding on little-endian
2749 targets and left padding on big-endian targets. The opposite
2750 holds if the structure is returned at the most significant
2751 end of the register. */
2752 if (bytes % UNITS_PER_WORD != 0
2753 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2754 ? !BYTES_BIG_ENDIAN
2755 : BYTES_BIG_ENDIAN))
2756 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2757 * BITS_PER_UNIT));
2759 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2760 dst_words = XALLOCAVEC (rtx, n_regs);
2761 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2763 /* Copy the structure BITSIZE bits at a time. */
2764 for (bitpos = 0, xbitpos = padding_correction;
2765 bitpos < bytes * BITS_PER_UNIT;
2766 bitpos += bitsize, xbitpos += bitsize)
2768 /* We need a new destination pseudo each time xbitpos is
2769 on a word boundary and when xbitpos == padding_correction
2770 (the first time through). */
2771 if (xbitpos % BITS_PER_WORD == 0
2772 || xbitpos == padding_correction)
2774 /* Generate an appropriate register. */
2775 dst_word = gen_reg_rtx (word_mode);
2776 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2778 /* Clear the destination before we move anything into it. */
2779 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2782 /* We need a new source operand each time bitpos is on a word
2783 boundary. */
2784 if (bitpos % BITS_PER_WORD == 0)
2785 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2787 /* Use bitpos for the source extraction (left justified) and
2788 xbitpos for the destination store (right justified). */
2789 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2790 0, 0, word_mode,
2791 extract_bit_field (src_word, bitsize,
2792 bitpos % BITS_PER_WORD, 1,
2793 NULL_RTX, word_mode, word_mode,
2794 false, NULL),
2795 false);
2798 if (mode == BLKmode)
2800 /* Find the smallest integer mode large enough to hold the
2801 entire structure. */
2802 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
2803 /* Have we found a large enough mode? */
2804 if (GET_MODE_SIZE (mode) >= bytes)
2805 break;
2807 /* A suitable mode should have been found. */
2808 gcc_assert (mode != VOIDmode);
2811 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2812 dst_mode = word_mode;
2813 else
2814 dst_mode = mode;
2815 dst = gen_reg_rtx (dst_mode);
2817 for (i = 0; i < n_regs; i++)
2818 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2820 if (mode != dst_mode)
2821 dst = gen_lowpart (mode, dst);
2823 return dst;
2826 /* Add a USE expression for REG to the (possibly empty) list pointed
2827 to by CALL_FUSAGE. REG must denote a hard register. */
2829 void
2830 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2832 gcc_assert (REG_P (reg));
2834 if (!HARD_REGISTER_P (reg))
2835 return;
2837 *call_fusage
2838 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2841 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2842 to by CALL_FUSAGE. REG must denote a hard register. */
2844 void
2845 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2847 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2849 *call_fusage
2850 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2853 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2854 starting at REGNO. All of these registers must be hard registers. */
2856 void
2857 use_regs (rtx *call_fusage, int regno, int nregs)
2859 int i;
2861 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2863 for (i = 0; i < nregs; i++)
2864 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2867 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2868 PARALLEL REGS. This is for calls that pass values in multiple
2869 non-contiguous locations. The Irix 6 ABI has examples of this. */
2871 void
2872 use_group_regs (rtx *call_fusage, rtx regs)
2874 int i;
2876 for (i = 0; i < XVECLEN (regs, 0); i++)
2878 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2880 /* A NULL entry means the parameter goes both on the stack and in
2881 registers. This can also be a MEM for targets that pass values
2882 partially on the stack and partially in registers. */
2883 if (reg != 0 && REG_P (reg))
2884 use_reg (call_fusage, reg);
2888 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2889 assignment and the code of the expression on the RHS is CODE. Return
2890 NULL otherwise. */
2892 static gimple *
2893 get_def_for_expr (tree name, enum tree_code code)
2895 gimple *def_stmt;
2897 if (TREE_CODE (name) != SSA_NAME)
2898 return NULL;
2900 def_stmt = get_gimple_for_ssa_name (name);
2901 if (!def_stmt
2902 || gimple_assign_rhs_code (def_stmt) != code)
2903 return NULL;
2905 return def_stmt;
2908 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2909 assignment and the class of the expression on the RHS is CLASS. Return
2910 NULL otherwise. */
2912 static gimple *
2913 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2915 gimple *def_stmt;
2917 if (TREE_CODE (name) != SSA_NAME)
2918 return NULL;
2920 def_stmt = get_gimple_for_ssa_name (name);
2921 if (!def_stmt
2922 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2923 return NULL;
2925 return def_stmt;
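/* Hedged usage sketch (the helper name is made up): testing whether the
   SSA name NAME was computed by some comparison, using the class variant
   above; get_def_for_expr is used the same way with a specific code such
   as MULT_EXPR.  */

static bool
fed_by_comparison_p (tree name)
{
  return get_def_for_expr_class (name, tcc_comparison) != NULL;
}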
2928 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2929 its length in bytes. */
2932 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2933 unsigned int expected_align, HOST_WIDE_INT expected_size,
2934 unsigned HOST_WIDE_INT min_size,
2935 unsigned HOST_WIDE_INT max_size,
2936 unsigned HOST_WIDE_INT probable_max_size)
2938 machine_mode mode = GET_MODE (object);
2939 unsigned int align;
2941 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2943 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2944 just move a zero. Otherwise, do this a piece at a time. */
2945 if (mode != BLKmode
2946 && CONST_INT_P (size)
2947 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2949 rtx zero = CONST0_RTX (mode);
2950 if (zero != NULL)
2952 emit_move_insn (object, zero);
2953 return NULL;
2956 if (COMPLEX_MODE_P (mode))
2958 zero = CONST0_RTX (GET_MODE_INNER (mode));
2959 if (zero != NULL)
2961 write_complex_part (object, zero, 0);
2962 write_complex_part (object, zero, 1);
2963 return NULL;
2968 if (size == const0_rtx)
2969 return NULL;
2971 align = MEM_ALIGN (object);
2973 if (CONST_INT_P (size)
2974 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2975 CLEAR_BY_PIECES,
2976 optimize_insn_for_speed_p ()))
2977 clear_by_pieces (object, INTVAL (size), align);
2978 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2979 expected_align, expected_size,
2980 min_size, max_size, probable_max_size))
2982 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2983 return set_storage_via_libcall (object, size, const0_rtx,
2984 method == BLOCK_OP_TAILCALL);
2985 else
2986 gcc_unreachable ();
2988 return NULL;
2992 clear_storage (rtx object, rtx size, enum block_op_methods method)
2994 unsigned HOST_WIDE_INT max, min = 0;
2995 if (GET_CODE (size) == CONST_INT)
2996 min = max = UINTVAL (size);
2997 else
2998 max = GET_MODE_MASK (GET_MODE (size));
2999 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
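/* Hedged usage sketch: zeroing LEN bytes of a BLKmode MEM through the
   plain entry point above, which derives the min/max hints itself and
   then lets clear_storage_hints pick between clearing by pieces, a setmem
   pattern and a memset libcall.  */

static void
clear_blk_example (rtx mem, HOST_WIDE_INT len)
{
  clear_storage (mem, GEN_INT (len), BLOCK_OP_NORMAL);
}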
3003 /* A subroutine of clear_storage. Expand a call to memset.
3004 Return the return value of memset, 0 otherwise. */
3007 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
3009 tree call_expr, fn, object_tree, size_tree, val_tree;
3010 machine_mode size_mode;
3012 object = copy_addr_to_reg (XEXP (object, 0));
3013 object_tree = make_tree (ptr_type_node, object);
3015 if (!CONST_INT_P (val))
3016 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
3017 val_tree = make_tree (integer_type_node, val);
3019 size_mode = TYPE_MODE (sizetype);
3020 size = convert_to_mode (size_mode, size, 1);
3021 size = copy_to_mode_reg (size_mode, size);
3022 size_tree = make_tree (sizetype, size);
3024 /* It is incorrect to use the libcall calling conventions for calls to
3025 memset because it can be provided by the user. */
3026 fn = builtin_decl_implicit (BUILT_IN_MEMSET);
3027 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
3028 CALL_EXPR_TAILCALL (call_expr) = tailcall;
3030 return expand_call (call_expr, NULL_RTX, false);
3033 /* Expand a setmem pattern; return true if successful. */
3035 bool
3036 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
3037 unsigned int expected_align, HOST_WIDE_INT expected_size,
3038 unsigned HOST_WIDE_INT min_size,
3039 unsigned HOST_WIDE_INT max_size,
3040 unsigned HOST_WIDE_INT probable_max_size)
3042 /* Try the most limited insn first, because there's no point
3043 including more than one in the machine description unless
3044 the more limited one has some advantage. */
3046 machine_mode mode;
3048 if (expected_align < align)
3049 expected_align = align;
3050 if (expected_size != -1)
3052 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
3053 expected_size = max_size;
3054 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
3055 expected_size = min_size;
3058 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
3060 enum insn_code code = direct_optab_handler (setmem_optab, mode);
3062 if (code != CODE_FOR_nothing
3063 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
3064 here because if SIZE is less than the mode mask, as it is
3065 returned by the macro, it will definitely be less than the
3066 actual mode mask. Since SIZE is within the Pmode address
3067 space, we limit MODE to Pmode. */
3068 && ((CONST_INT_P (size)
3069 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3070 <= (GET_MODE_MASK (mode) >> 1)))
3071 || max_size <= (GET_MODE_MASK (mode) >> 1)
3072 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
3074 struct expand_operand ops[9];
3075 unsigned int nops;
3077 nops = insn_data[(int) code].n_generator_args;
3078 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
3080 create_fixed_operand (&ops[0], object);
3081 /* The check above guarantees that this size conversion is valid. */
3082 create_convert_operand_to (&ops[1], size, mode, true);
3083 create_convert_operand_from (&ops[2], val, byte_mode, true);
3084 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
3085 if (nops >= 6)
3087 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
3088 create_integer_operand (&ops[5], expected_size);
3090 if (nops >= 8)
3092 create_integer_operand (&ops[6], min_size);
3093 /* If we cannot represent the maximal size,
3094 make the parameter NULL. */
3095 if ((HOST_WIDE_INT) max_size != -1)
3096 create_integer_operand (&ops[7], max_size);
3097 else
3098 create_fixed_operand (&ops[7], NULL);
3100 if (nops == 9)
3102 /* If we cannot represent the maximal size,
3103 make the parameter NULL. */
3104 if ((HOST_WIDE_INT) probable_max_size != -1)
3105 create_integer_operand (&ops[8], probable_max_size);
3106 else
3107 create_fixed_operand (&ops[8], NULL);
3109 if (maybe_expand_insn (code, nops, ops))
3110 return true;
3114 return false;
3118 /* Write to one of the components of the complex value CPLX. Write VAL to
3119 the real part if IMAG_P is false, and the imaginary part if it's true. */
3121 void
3122 write_complex_part (rtx cplx, rtx val, bool imag_p)
3124 machine_mode cmode;
3125 machine_mode imode;
3126 unsigned ibitsize;
3128 if (GET_CODE (cplx) == CONCAT)
3130 emit_move_insn (XEXP (cplx, imag_p), val);
3131 return;
3134 cmode = GET_MODE (cplx);
3135 imode = GET_MODE_INNER (cmode);
3136 ibitsize = GET_MODE_BITSIZE (imode);
3138 /* For MEMs simplify_gen_subreg may generate an invalid new address
3139 because, e.g., the original address is considered mode-dependent
3140 by the target, which restricts simplify_subreg from invoking
3141 adjust_address_nv. Instead of preparing fallback support for an
3142 invalid address, we call adjust_address_nv directly. */
3143 if (MEM_P (cplx))
3145 emit_move_insn (adjust_address_nv (cplx, imode,
3146 imag_p ? GET_MODE_SIZE (imode) : 0),
3147 val);
3148 return;
3151 /* If the sub-object is at least word sized, then we know that subregging
3152 will work. This special case is important, since store_bit_field
3153 wants to operate on integer modes, and there's rarely an OImode to
3154 correspond to TCmode. */
3155 if (ibitsize >= BITS_PER_WORD
3156 /* For hard regs we have exact predicates. Assume we can split
3157 the original object if it spans an even number of hard regs.
3158 This special case is important for SCmode on 64-bit platforms
3159 where the natural size of floating-point regs is 32-bit. */
3160 || (REG_P (cplx)
3161 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3162 && REG_NREGS (cplx) % 2 == 0))
3164 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3165 imag_p ? GET_MODE_SIZE (imode) : 0);
3166 if (part)
3168 emit_move_insn (part, val);
3169 return;
3171 else
3172 /* simplify_gen_subreg may fail for sub-word MEMs. */
3173 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3176 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
3177 false);
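/* Standalone model of the memory case above, assuming a complex float
   stored as two adjacent floats: the real part lives at byte offset 0 and
   the imaginary part at offset GET_MODE_SIZE of the inner mode,
   independently of byte order.  */

static void
write_complex_part_model (float *cplx_mem, float val, int imag_p)
{
  cplx_mem[imag_p ? 1 : 0] = val;
}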
3180 /* Extract one of the components of the complex value CPLX. Extract the
3181 real part if IMAG_P is false, and the imaginary part if it's true. */
3184 read_complex_part (rtx cplx, bool imag_p)
3186 machine_mode cmode, imode;
3187 unsigned ibitsize;
3189 if (GET_CODE (cplx) == CONCAT)
3190 return XEXP (cplx, imag_p);
3192 cmode = GET_MODE (cplx);
3193 imode = GET_MODE_INNER (cmode);
3194 ibitsize = GET_MODE_BITSIZE (imode);
3196 /* Special case reads from complex constants that got spilled to memory. */
3197 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3199 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3200 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3202 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3203 if (CONSTANT_CLASS_P (part))
3204 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3208 /* For MEMs simplify_gen_subreg may generate an invalid new address
3209 because, e.g., the original address is considered mode-dependent
3210 by the target, which restricts simplify_subreg from invoking
3211 adjust_address_nv. Instead of preparing fallback support for an
3212 invalid address, we call adjust_address_nv directly. */
3213 if (MEM_P (cplx))
3214 return adjust_address_nv (cplx, imode,
3215 imag_p ? GET_MODE_SIZE (imode) : 0);
3217 /* If the sub-object is at least word sized, then we know that subregging
3218 will work. This special case is important, since extract_bit_field
3219 wants to operate on integer modes, and there's rarely an OImode to
3220 correspond to TCmode. */
3221 if (ibitsize >= BITS_PER_WORD
3222 /* For hard regs we have exact predicates. Assume we can split
3223 the original object if it spans an even number of hard regs.
3224 This special case is important for SCmode on 64-bit platforms
3225 where the natural size of floating-point regs is 32-bit. */
3226 || (REG_P (cplx)
3227 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3228 && REG_NREGS (cplx) % 2 == 0))
3230 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3231 imag_p ? GET_MODE_SIZE (imode) : 0);
3232 if (ret)
3233 return ret;
3234 else
3235 /* simplify_gen_subreg may fail for sub-word MEMs. */
3236 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3239 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3240 true, NULL_RTX, imode, imode, false, NULL);
3243 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3244 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3245 represented in NEW_MODE. If FORCE is true, this will never happen, as
3246 we'll force-create a SUBREG if needed. */
3248 static rtx
3249 emit_move_change_mode (machine_mode new_mode,
3250 machine_mode old_mode, rtx x, bool force)
3252 rtx ret;
3254 if (push_operand (x, GET_MODE (x)))
3256 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3257 MEM_COPY_ATTRIBUTES (ret, x);
3259 else if (MEM_P (x))
3261 /* We don't have to worry about changing the address since the
3262 size in bytes is supposed to be the same. */
3263 if (reload_in_progress)
3265 /* Copy the MEM to change the mode and move any
3266 substitutions from the old MEM to the new one. */
3267 ret = adjust_address_nv (x, new_mode, 0);
3268 copy_replacements (x, ret);
3270 else
3271 ret = adjust_address (x, new_mode, 0);
3273 else
3275 /* Note that we do want simplify_subreg's behavior of validating
3276 that the new mode is ok for a hard register. If we were to use
3277 simplify_gen_subreg, we would create the subreg, but would
3278 probably run into the target not being able to implement it. */
3279 /* Except, of course, when FORCE is true, when this is exactly what
3280 we want. Which is needed for CCmodes on some targets. */
3281 if (force)
3282 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3283 else
3284 ret = simplify_subreg (new_mode, x, old_mode, 0);
3287 return ret;
3290 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3291 an integer mode of the same size as MODE. Returns the instruction
3292 emitted, or NULL if such a move could not be generated. */
3294 static rtx_insn *
3295 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3297 scalar_int_mode imode;
3298 enum insn_code code;
3300 /* There must exist a mode of the exact size we require. */
3301 if (!int_mode_for_mode (mode).exists (&imode))
3302 return NULL;
3304 /* The target must support moves in this mode. */
3305 code = optab_handler (mov_optab, imode);
3306 if (code == CODE_FOR_nothing)
3307 return NULL;
3309 x = emit_move_change_mode (imode, mode, x, force);
3310 if (x == NULL_RTX)
3311 return NULL;
3312 y = emit_move_change_mode (imode, mode, y, force);
3313 if (y == NULL_RTX)
3314 return NULL;
3315 return emit_insn (GEN_FCN (code) (x, y));
3318 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3319 Return an equivalent MEM that does not use an auto-increment. */
3322 emit_move_resolve_push (machine_mode mode, rtx x)
3324 enum rtx_code code = GET_CODE (XEXP (x, 0));
3325 HOST_WIDE_INT adjust;
3326 rtx temp;
3328 adjust = GET_MODE_SIZE (mode);
3329 #ifdef PUSH_ROUNDING
3330 adjust = PUSH_ROUNDING (adjust);
3331 #endif
3332 if (code == PRE_DEC || code == POST_DEC)
3333 adjust = -adjust;
3334 else if (code == PRE_MODIFY || code == POST_MODIFY)
3336 rtx expr = XEXP (XEXP (x, 0), 1);
3337 HOST_WIDE_INT val;
3339 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3340 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3341 val = INTVAL (XEXP (expr, 1));
3342 if (GET_CODE (expr) == MINUS)
3343 val = -val;
3344 gcc_assert (adjust == val || adjust == -val);
3345 adjust = val;
3348 /* Do not use anti_adjust_stack, since we don't want to update
3349 stack_pointer_delta. */
3350 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3351 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3352 0, OPTAB_LIB_WIDEN);
3353 if (temp != stack_pointer_rtx)
3354 emit_move_insn (stack_pointer_rtx, temp);
3356 switch (code)
3358 case PRE_INC:
3359 case PRE_DEC:
3360 case PRE_MODIFY:
3361 temp = stack_pointer_rtx;
3362 break;
3363 case POST_INC:
3364 case POST_DEC:
3365 case POST_MODIFY:
3366 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3367 break;
3368 default:
3369 gcc_unreachable ();
3372 return replace_equiv_address (x, temp);
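/* Standalone sketch of the address arithmetic above: after the explicit
   stack-pointer adjustment, a pre-modification form addresses the new
   stack pointer while a post-modification form addresses the old one.
   The stack growth direction is folded into the sign of ADJUST, as in the
   code above; PUSH_ROUNDING is ignored here.  */

static unsigned long
resolve_push_address_model (unsigned long sp, long adjust, int post_p)
{
  sp += adjust;			/* emitted as an explicit PLUS on sp */
  return post_p ? sp - adjust : sp;
}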
3375 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3376 X is known to satisfy push_operand, and MODE is known to be complex.
3377 Returns the last instruction emitted. */
3379 rtx_insn *
3380 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3382 machine_mode submode = GET_MODE_INNER (mode);
3383 bool imag_first;
3385 #ifdef PUSH_ROUNDING
3386 unsigned int submodesize = GET_MODE_SIZE (submode);
3388 /* In case we output to the stack but the machine cannot push exactly
3389 this part size, we need to use move instructions. */
3390 if (PUSH_ROUNDING (submodesize) != submodesize)
3392 x = emit_move_resolve_push (mode, x);
3393 return emit_move_insn (x, y);
3395 #endif
3397 /* Note that the real part always precedes the imag part in memory
3398 regardless of machine's endianness. */
3399 switch (GET_CODE (XEXP (x, 0)))
3401 case PRE_DEC:
3402 case POST_DEC:
3403 imag_first = true;
3404 break;
3405 case PRE_INC:
3406 case POST_INC:
3407 imag_first = false;
3408 break;
3409 default:
3410 gcc_unreachable ();
3413 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3414 read_complex_part (y, imag_first));
3415 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3416 read_complex_part (y, !imag_first));
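/* Standalone sketch of the ordering above for a stack that grows downward
   (the PRE_DEC/POST_DEC cases); SP is assumed to point just past two free
   slots.  The imaginary part is pushed first, so after both pushes the
   real part sits at the lower address and therefore precedes the
   imaginary part in memory, whatever the endianness.  */

static float *
complex_push_order_model (float *sp, float re, float im)
{
  *--sp = im;	/* first push, higher address */
  *--sp = re;	/* second push, lower address */
  return sp;	/* points at the real part, like a MEM of the whole value */
}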
3419 /* A subroutine of emit_move_complex. Perform the move from Y to X
3420 via two moves of the parts. Returns the last instruction emitted. */
3422 rtx_insn *
3423 emit_move_complex_parts (rtx x, rtx y)
3425 /* Show the output dies here. This is necessary for SUBREGs
3426 of pseudos since we cannot track their lifetimes correctly;
3427 hard regs shouldn't appear here except as return values. */
3428 if (!reload_completed && !reload_in_progress
3429 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3430 emit_clobber (x);
3432 write_complex_part (x, read_complex_part (y, false), false);
3433 write_complex_part (x, read_complex_part (y, true), true);
3435 return get_last_insn ();
3438 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3439 MODE is known to be complex. Returns the last instruction emitted. */
3441 static rtx_insn *
3442 emit_move_complex (machine_mode mode, rtx x, rtx y)
3444 bool try_int;
3446 /* Need to take special care for pushes, to maintain proper ordering
3447 of the data, and possibly extra padding. */
3448 if (push_operand (x, mode))
3449 return emit_move_complex_push (mode, x, y);
3451 /* See if we can coerce the target into moving both values at once, except
3452 for floating point where we favor moving as parts if this is easy. */
3453 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3454 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3455 && !(REG_P (x)
3456 && HARD_REGISTER_P (x)
3457 && REG_NREGS (x) == 1)
3458 && !(REG_P (y)
3459 && HARD_REGISTER_P (y)
3460 && REG_NREGS (y) == 1))
3461 try_int = false;
3462 /* Not possible if the values are inherently not adjacent. */
3463 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3464 try_int = false;
3465 /* Is possible if both are registers (or subregs of registers). */
3466 else if (register_operand (x, mode) && register_operand (y, mode))
3467 try_int = true;
3468 /* If one of the operands is a memory, and alignment constraints
3469 are friendly enough, we may be able to do combined memory operations.
3470 We do not attempt this if Y is a constant because that combination is
3471 usually better with the by-parts thing below. */
3472 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3473 && (!STRICT_ALIGNMENT
3474 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3475 try_int = true;
3476 else
3477 try_int = false;
3479 if (try_int)
3481 rtx_insn *ret;
3483 /* For memory to memory moves, optimal behavior can be had with the
3484 existing block move logic. */
3485 if (MEM_P (x) && MEM_P (y))
3487 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3488 BLOCK_OP_NO_LIBCALL);
3489 return get_last_insn ();
3492 ret = emit_move_via_integer (mode, x, y, true);
3493 if (ret)
3494 return ret;
3497 return emit_move_complex_parts (x, y);
3500 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3501 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3503 static rtx_insn *
3504 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3506 rtx_insn *ret;
3508 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3509 if (mode != CCmode)
3511 enum insn_code code = optab_handler (mov_optab, CCmode);
3512 if (code != CODE_FOR_nothing)
3514 x = emit_move_change_mode (CCmode, mode, x, true);
3515 y = emit_move_change_mode (CCmode, mode, y, true);
3516 return emit_insn (GEN_FCN (code) (x, y));
3520 /* Otherwise, find the MODE_INT mode of the same width. */
3521 ret = emit_move_via_integer (mode, x, y, false);
3522 gcc_assert (ret != NULL);
3523 return ret;
3526 /* Return true if word I of OP lies entirely in the
3527 undefined bits of a paradoxical subreg. */
3529 static bool
3530 undefined_operand_subword_p (const_rtx op, int i)
3532 machine_mode innermode, innermostmode;
3533 int offset;
3534 if (GET_CODE (op) != SUBREG)
3535 return false;
3536 innermode = GET_MODE (op);
3537 innermostmode = GET_MODE (SUBREG_REG (op));
3538 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3539 /* The SUBREG_BYTE represents offset, as if the value were stored in
3540 memory, except for a paradoxical subreg where we define
3541 SUBREG_BYTE to be 0; undo this exception as in
3542 simplify_subreg. */
3543 if (SUBREG_BYTE (op) == 0
3544 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3546 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3547 if (WORDS_BIG_ENDIAN)
3548 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3549 if (BYTES_BIG_ENDIAN)
3550 offset += difference % UNITS_PER_WORD;
3552 if (offset >= GET_MODE_SIZE (innermostmode)
3553 || offset <= -GET_MODE_SIZE (word_mode))
3554 return true;
3555 return false;
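/* Simplified sketch of the test above for the common little-endian case
   with SUBREG_BYTE 0, e.g. (subreg:DI (reg:SI x) 0) with 4-byte words:
   word 0 overlaps the SImode value, while word 1 starts at byte offset 4,
   beyond the inner mode, and is therefore undefined.  The big-endian
   offset correction handled above is omitted here.  */

static int
undefined_subword_model (int word_i, int units_per_word, int inner_bytes)
{
  int offset = word_i * units_per_word;

  return offset >= inner_bytes;
}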
3558 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3559 MODE is any multi-word or full-word mode that lacks a move_insn
3560 pattern. Note that you will get better code if you define such
3561 patterns, even if they must turn into multiple assembler instructions. */
3563 static rtx_insn *
3564 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3566 rtx_insn *last_insn = 0;
3567 rtx_insn *seq;
3568 rtx inner;
3569 bool need_clobber;
3570 int i;
3572 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3574 /* If X is a push on the stack, do the push now and replace
3575 X with a reference to the stack pointer. */
3576 if (push_operand (x, mode))
3577 x = emit_move_resolve_push (mode, x);
3579 /* If we are in reload, see if either operand is a MEM whose address
3580 is scheduled for replacement. */
3581 if (reload_in_progress && MEM_P (x)
3582 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3583 x = replace_equiv_address_nv (x, inner);
3584 if (reload_in_progress && MEM_P (y)
3585 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3586 y = replace_equiv_address_nv (y, inner);
3588 start_sequence ();
3590 need_clobber = false;
3591 for (i = 0;
3592 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3593 i++)
3595 rtx xpart = operand_subword (x, i, 1, mode);
3596 rtx ypart;
3598 /* Do not generate code for a move if it would come entirely
3599 from the undefined bits of a paradoxical subreg. */
3600 if (undefined_operand_subword_p (y, i))
3601 continue;
3603 ypart = operand_subword (y, i, 1, mode);
3605 /* If we can't get a part of Y, put Y into memory if it is a
3606 constant. Otherwise, force it into a register. Then we must
3607 be able to get a part of Y. */
3608 if (ypart == 0 && CONSTANT_P (y))
3610 y = use_anchored_address (force_const_mem (mode, y));
3611 ypart = operand_subword (y, i, 1, mode);
3613 else if (ypart == 0)
3614 ypart = operand_subword_force (y, i, mode);
3616 gcc_assert (xpart && ypart);
3618 need_clobber |= (GET_CODE (xpart) == SUBREG);
3620 last_insn = emit_move_insn (xpart, ypart);
3623 seq = get_insns ();
3624 end_sequence ();
3626 /* Show the output dies here. This is necessary for SUBREGs
3627 of pseudos since we cannot track their lifetimes correctly;
3628 hard regs shouldn't appear here except as return values.
3629 We never want to emit such a clobber after reload. */
3630 if (x != y
3631 && ! (reload_in_progress || reload_completed)
3632 && need_clobber != 0)
3633 emit_clobber (x);
3635 emit_insn (seq);
3637 return last_insn;
3640 /* Low level part of emit_move_insn.
3641 Called just like emit_move_insn, but assumes X and Y
3642 are basically valid. */
3644 rtx_insn *
3645 emit_move_insn_1 (rtx x, rtx y)
3647 machine_mode mode = GET_MODE (x);
3648 enum insn_code code;
3650 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3652 code = optab_handler (mov_optab, mode);
3653 if (code != CODE_FOR_nothing)
3654 return emit_insn (GEN_FCN (code) (x, y));
3656 /* Expand complex moves by moving real part and imag part. */
3657 if (COMPLEX_MODE_P (mode))
3658 return emit_move_complex (mode, x, y);
3660 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3661 || ALL_FIXED_POINT_MODE_P (mode))
3663 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3665 /* If we can't find an integer mode, use multi words. */
3666 if (result)
3667 return result;
3668 else
3669 return emit_move_multi_word (mode, x, y);
3672 if (GET_MODE_CLASS (mode) == MODE_CC)
3673 return emit_move_ccmode (mode, x, y);
3675 /* Try using a move pattern for the corresponding integer mode. This is
3676 only safe when simplify_subreg can convert MODE constants into integer
3677 constants. At present, it can only do this reliably if the value
3678 fits within a HOST_WIDE_INT. */
3679 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3681 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3683 if (ret)
3685 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3686 return ret;
3690 return emit_move_multi_word (mode, x, y);
3693 /* Generate code to copy Y into X.
3694 Both Y and X must have the same mode, except that
3695 Y can be a constant with VOIDmode.
3696 This mode cannot be BLKmode; use emit_block_move for that.
3698 Return the last instruction emitted. */
3700 rtx_insn *
3701 emit_move_insn (rtx x, rtx y)
3703 machine_mode mode = GET_MODE (x);
3704 rtx y_cst = NULL_RTX;
3705 rtx_insn *last_insn;
3706 rtx set;
3708 gcc_assert (mode != BLKmode
3709 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3711 if (CONSTANT_P (y))
3713 if (optimize
3714 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3715 && (last_insn = compress_float_constant (x, y)))
3716 return last_insn;
3718 y_cst = y;
3720 if (!targetm.legitimate_constant_p (mode, y))
3722 y = force_const_mem (mode, y);
3724 /* If the target's cannot_force_const_mem prevented the spill,
3725 assume that the target's move expanders will also take care
3726 of the non-legitimate constant. */
3727 if (!y)
3728 y = y_cst;
3729 else
3730 y = use_anchored_address (y);
3734 /* If X or Y are memory references, verify that their addresses are valid
3735 for the machine. */
3736 if (MEM_P (x)
3737 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3738 MEM_ADDR_SPACE (x))
3739 && ! push_operand (x, GET_MODE (x))))
3740 x = validize_mem (x);
3742 if (MEM_P (y)
3743 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3744 MEM_ADDR_SPACE (y)))
3745 y = validize_mem (y);
3747 gcc_assert (mode != BLKmode);
3749 last_insn = emit_move_insn_1 (x, y);
3751 if (y_cst && REG_P (x)
3752 && (set = single_set (last_insn)) != NULL_RTX
3753 && SET_DEST (set) == x
3754 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3755 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3757 return last_insn;
3760 /* Generate the body of an instruction to copy Y into X.
3761 It may be a list of insns, if one insn isn't enough. */
3763 rtx_insn *
3764 gen_move_insn (rtx x, rtx y)
3766 rtx_insn *seq;
3768 start_sequence ();
3769 emit_move_insn_1 (x, y);
3770 seq = get_insns ();
3771 end_sequence ();
3772 return seq;
3775 /* If Y is representable exactly in a narrower mode, and the target can
3776 perform the extension directly from constant or memory, then emit the
3777 move as an extension. */
3779 static rtx_insn *
3780 compress_float_constant (rtx x, rtx y)
3782 machine_mode dstmode = GET_MODE (x);
3783 machine_mode orig_srcmode = GET_MODE (y);
3784 machine_mode srcmode;
3785 const REAL_VALUE_TYPE *r;
3786 int oldcost, newcost;
3787 bool speed = optimize_insn_for_speed_p ();
3789 r = CONST_DOUBLE_REAL_VALUE (y);
3791 if (targetm.legitimate_constant_p (dstmode, y))
3792 oldcost = set_src_cost (y, orig_srcmode, speed);
3793 else
3794 oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3796 FOR_EACH_MODE_UNTIL (srcmode, orig_srcmode)
3798 enum insn_code ic;
3799 rtx trunc_y;
3800 rtx_insn *last_insn;
3802 /* Skip if the target can't extend this way. */
3803 ic = can_extend_p (dstmode, srcmode, 0);
3804 if (ic == CODE_FOR_nothing)
3805 continue;
3807 /* Skip if the narrowed value isn't exact. */
3808 if (! exact_real_truncate (srcmode, r))
3809 continue;
3811 trunc_y = const_double_from_real_value (*r, srcmode);
3813 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3815 /* Skip if the target needs extra instructions to perform
3816 the extension. */
3817 if (!insn_operand_matches (ic, 1, trunc_y))
3818 continue;
3819 /* This is valid, but may not be cheaper than the original. */
3820 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3821 dstmode, speed);
3822 if (oldcost < newcost)
3823 continue;
3825 else if (float_extend_from_mem[dstmode][srcmode])
3827 trunc_y = force_const_mem (srcmode, trunc_y);
3828 /* This is valid, but may not be cheaper than the original. */
3829 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3830 dstmode, speed);
3831 if (oldcost < newcost)
3832 continue;
3833 trunc_y = validize_mem (trunc_y);
3835 else
3836 continue;
3838 /* For CSE's benefit, force the compressed constant pool entry
3839 into a new pseudo. This constant may be used in different modes,
3840 and if not, combine will put things back together for us. */
3841 trunc_y = force_reg (srcmode, trunc_y);
3843 /* If x is a hard register, perform the extension into a pseudo,
3844 so that e.g. stack realignment code is aware of it. */
3845 rtx target = x;
3846 if (REG_P (x) && HARD_REGISTER_P (x))
3847 target = gen_reg_rtx (dstmode);
3849 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3850 last_insn = get_last_insn ();
3852 if (REG_P (target))
3853 set_unique_reg_note (last_insn, REG_EQUAL, y);
3855 if (target != x)
3856 return emit_move_insn (x, target);
3857 return last_insn;
3860 return NULL;
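
/* Editorial illustration, not part of GCC: exact_real_truncate above
   asks whether narrowing the constant loses bits.  In plain C terms,
   and assuming IEEE float/double, 1.5 survives a round trip through
   float while 0.1 does not, so only the former is a candidate for the
   float->double extension this function emits.  Hypothetical
   stand-alone check:  */

static int
round_trips_through_float_sketch (double d)
{
  volatile float narrowed = (float) d; /* volatile keeps the narrowing explicit  */
  return (double) narrowed == d;       /* 1 for 1.5, 0 for 0.1 on IEEE targets   */
}
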
3863 /* Pushing data onto the stack. */
3865 /* Push a block of length SIZE (perhaps variable)
3866 and return an rtx to address the beginning of the block.
3867 The value may be virtual_outgoing_args_rtx.
3869 EXTRA is the number of bytes of padding to push in addition to SIZE.
3870 BELOW nonzero means this padding comes at low addresses;
3871 otherwise, the padding comes at high addresses. */
3874 push_block (rtx size, int extra, int below)
3876 rtx temp;
3878 size = convert_modes (Pmode, ptr_mode, size, 1);
3879 if (CONSTANT_P (size))
3880 anti_adjust_stack (plus_constant (Pmode, size, extra));
3881 else if (REG_P (size) && extra == 0)
3882 anti_adjust_stack (size);
3883 else
3885 temp = copy_to_mode_reg (Pmode, size);
3886 if (extra != 0)
3887 temp = expand_binop (Pmode, add_optab, temp,
3888 gen_int_mode (extra, Pmode),
3889 temp, 0, OPTAB_LIB_WIDEN);
3890 anti_adjust_stack (temp);
3893 if (STACK_GROWS_DOWNWARD)
3895 temp = virtual_outgoing_args_rtx;
3896 if (extra != 0 && below)
3897 temp = plus_constant (Pmode, temp, extra);
3899 else
3901 if (CONST_INT_P (size))
3902 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3903 -INTVAL (size) - (below ? 0 : extra));
3904 else if (extra != 0 && !below)
3905 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3906 negate_rtx (Pmode, plus_constant (Pmode, size,
3907 extra)));
3908 else
3909 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3910 negate_rtx (Pmode, size));
3913 return memory_address (NARROWEST_INT_MODE, temp);
3916 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3918 static rtx
3919 mem_autoinc_base (rtx mem)
3921 if (MEM_P (mem))
3923 rtx addr = XEXP (mem, 0);
3924 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3925 return XEXP (addr, 0);
3927 return NULL;
3930 /* A utility routine used here, in reload, and in try_split. The insns
3931 after PREV up to and including LAST are known to adjust the stack,
3932 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3933 placing notes as appropriate. PREV may be NULL, indicating the
3934 entire insn sequence prior to LAST should be scanned.
3936 The set of allowed stack pointer modifications is small:
3937 (1) One or more auto-inc style memory references (aka pushes),
3938 (2) One or more addition/subtraction with the SP as destination,
3939 (3) A single move insn with the SP as destination,
3940 (4) A call_pop insn,
3941 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3943 Insns in the sequence that do not modify the SP are ignored,
3944 except for noreturn calls.
3946 The return value is the amount of adjustment that can be trivially
3947 verified, via immediate operand or auto-inc. If the adjustment
3948 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3950 HOST_WIDE_INT
3951 find_args_size_adjust (rtx_insn *insn)
3953 rtx dest, set, pat;
3954 int i;
3956 pat = PATTERN (insn);
3957 set = NULL;
3959 /* Look for a call_pop pattern. */
3960 if (CALL_P (insn))
3962 /* We have to allow non-call_pop patterns for the case
3963 of emit_single_push_insn of a TLS address. */
3964 if (GET_CODE (pat) != PARALLEL)
3965 return 0;
3967 /* All call_pop have a stack pointer adjust in the parallel.
3968 The call itself is always first, and the stack adjust is
3969 usually last, so search from the end. */
3970 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3972 set = XVECEXP (pat, 0, i);
3973 if (GET_CODE (set) != SET)
3974 continue;
3975 dest = SET_DEST (set);
3976 if (dest == stack_pointer_rtx)
3977 break;
3979 /* We'd better have found the stack pointer adjust. */
3980 if (i == 0)
3981 return 0;
3982 /* Fall through to process the extracted SET and DEST
3983 as if it was a standalone insn. */
3985 else if (GET_CODE (pat) == SET)
3986 set = pat;
3987 else if ((set = single_set (insn)) != NULL)
3989 else if (GET_CODE (pat) == PARALLEL)
3991 /* ??? Some older ports use a parallel with a stack adjust
3992 and a store for a PUSH_ROUNDING pattern, rather than a
3993 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3994 /* ??? See h8300 and m68k, pushqi1. */
3995 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3997 set = XVECEXP (pat, 0, i);
3998 if (GET_CODE (set) != SET)
3999 continue;
4000 dest = SET_DEST (set);
4001 if (dest == stack_pointer_rtx)
4002 break;
4004 /* We do not expect an auto-inc of the sp in the parallel. */
4005 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
4006 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4007 != stack_pointer_rtx);
4009 if (i < 0)
4010 return 0;
4012 else
4013 return 0;
4015 dest = SET_DEST (set);
4017 /* Look for direct modifications of the stack pointer. */
4018 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
4020 /* Look for a trivial adjustment, otherwise assume nothing. */
4021 /* Note that the SPU restore_stack_block pattern refers to
4022 the stack pointer in V4SImode. Consider that non-trivial. */
4023 if (SCALAR_INT_MODE_P (GET_MODE (dest))
4024 && GET_CODE (SET_SRC (set)) == PLUS
4025 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
4026 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
4027 return INTVAL (XEXP (SET_SRC (set), 1));
4028 /* ??? Reload can generate no-op moves, which will be cleaned
4029 up later. Recognize it and continue searching. */
4030 else if (rtx_equal_p (dest, SET_SRC (set)))
4031 return 0;
4032 else
4033 return HOST_WIDE_INT_MIN;
4035 else
4037 rtx mem, addr;
4039 /* Otherwise only think about autoinc patterns. */
4040 if (mem_autoinc_base (dest) == stack_pointer_rtx)
4042 mem = dest;
4043 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4044 != stack_pointer_rtx);
4046 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
4047 mem = SET_SRC (set);
4048 else
4049 return 0;
4051 addr = XEXP (mem, 0);
4052 switch (GET_CODE (addr))
4054 case PRE_INC:
4055 case POST_INC:
4056 return GET_MODE_SIZE (GET_MODE (mem));
4057 case PRE_DEC:
4058 case POST_DEC:
4059 return -GET_MODE_SIZE (GET_MODE (mem));
4060 case PRE_MODIFY:
4061 case POST_MODIFY:
4062 addr = XEXP (addr, 1);
4063 gcc_assert (GET_CODE (addr) == PLUS);
4064 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
4065 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
4066 return INTVAL (XEXP (addr, 1));
4067 default:
4068 gcc_unreachable ();
4074 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
4076 int args_size = end_args_size;
4077 bool saw_unknown = false;
4078 rtx_insn *insn;
4080 for (insn = last; insn != prev; insn = PREV_INSN (insn))
4082 HOST_WIDE_INT this_delta;
4084 if (!NONDEBUG_INSN_P (insn))
4085 continue;
4087 this_delta = find_args_size_adjust (insn);
4088 if (this_delta == 0)
4090 if (!CALL_P (insn)
4091 || ACCUMULATE_OUTGOING_ARGS
4092 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
4093 continue;
4096 gcc_assert (!saw_unknown);
4097 if (this_delta == HOST_WIDE_INT_MIN)
4098 saw_unknown = true;
4100 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
4101 if (STACK_GROWS_DOWNWARD)
4102 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
4104 args_size -= this_delta;
4107 return saw_unknown ? INT_MIN : args_size;
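
/* Editorial sketch, not part of GCC: walking the sequence backwards,
   each stack-adjusting insn is annotated with the argument-block size
   in effect after it executes.  With three hypothetical 4-byte pushes
   on a downward-growing stack and a final size of 12, the notes read
   4, 8, 12 in insn order.  Plain-integer version of that walk:  */

static void
annotate_args_size_sketch (const int *push_bytes, int n_pushes,
                           int end_args_size, int *note_out)
{
  int args_size = end_args_size;
  for (int i = n_pushes - 1; i >= 0; i--)
    {
      note_out[i] = args_size;      /* what REG_ARGS_SIZE would record  */
      args_size -= push_bytes[i];   /* size in effect before push I     */
    }
}
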
4110 #ifdef PUSH_ROUNDING
4111 /* Emit single push insn. */
4113 static void
4114 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4116 rtx dest_addr;
4117 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4118 rtx dest;
4119 enum insn_code icode;
4121 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4122 /* If there is a push pattern, use it. Otherwise try the old way of
4123 throwing a MEM representing the push operation at the move expander. */
4124 icode = optab_handler (push_optab, mode);
4125 if (icode != CODE_FOR_nothing)
4127 struct expand_operand ops[1];
4129 create_input_operand (&ops[0], x, mode);
4130 if (maybe_expand_insn (icode, 1, ops))
4131 return;
4133 if (GET_MODE_SIZE (mode) == rounded_size)
4134 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4135 /* If we are to pad downward, adjust the stack pointer first and
4136 then store X into the stack location using an offset. This is
4137 because emit_move_insn does not know how to pad; it does not have
4138 access to the type. */
4139 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4141 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4142 HOST_WIDE_INT offset;
4144 emit_move_insn (stack_pointer_rtx,
4145 expand_binop (Pmode,
4146 STACK_GROWS_DOWNWARD ? sub_optab
4147 : add_optab,
4148 stack_pointer_rtx,
4149 gen_int_mode (rounded_size, Pmode),
4150 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4152 offset = (HOST_WIDE_INT) padding_size;
4153 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
4154 /* We have already decremented the stack pointer, so get the
4155 previous value. */
4156 offset += (HOST_WIDE_INT) rounded_size;
4158 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4159 /* We have already incremented the stack pointer, so get the
4160 previous value. */
4161 offset -= (HOST_WIDE_INT) rounded_size;
4163 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4164 gen_int_mode (offset, Pmode));
4166 else
4168 if (STACK_GROWS_DOWNWARD)
4169 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4170 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4171 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4172 Pmode));
4173 else
4174 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4175 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4176 gen_int_mode (rounded_size, Pmode));
4178 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4181 dest = gen_rtx_MEM (mode, dest_addr);
4183 if (type != 0)
4185 set_mem_attributes (dest, type, 1);
4187 if (cfun->tail_call_marked)
4188 /* Function incoming arguments may overlap with sibling call
4189 outgoing arguments and we cannot allow reordering of reads
4190 from function arguments with stores to outgoing arguments
4191 of sibling calls. */
4192 set_mem_alias_set (dest, 0);
4194 emit_move_insn (dest, x);
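
/* Editorial sketch, not part of GCC: for the pad-downward branch above,
   on the common configuration of a downward-growing stack with a
   PRE_DEC push, the stack pointer is first dropped by the rounded slot
   size and the value is then stored PADDING_SIZE bytes above the new
   stack pointer, i.e. at the high end of its slot.  Hypothetical
   plain-integer version of that offset:  */

static long
padded_push_offset_sketch (long mode_size, long rounded_size)
{
  long padding_size = rounded_size - mode_size;
  /* Store address = new_sp + padding_size; the POST_DEC/POST_INC
     corrections made by the real code are ignored here.  */
  return padding_size;
}
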
4197 /* Emit and annotate a single push insn. */
4199 static void
4200 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4202 int delta, old_delta = stack_pointer_delta;
4203 rtx_insn *prev = get_last_insn ();
4204 rtx_insn *last;
4206 emit_single_push_insn_1 (mode, x, type);
4208 last = get_last_insn ();
4210 /* Notice the common case where we emitted exactly one insn. */
4211 if (PREV_INSN (last) == prev)
4213 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4214 return;
4217 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4218 gcc_assert (delta == INT_MIN || delta == old_delta);
4220 #endif
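
/* Editorial sketch, not part of GCC: PUSH_ROUNDING, used throughout the
   block above, rounds a push up to the target's push granularity.  A
   hypothetical definition for a target that pushes in 4-byte units:  */

static unsigned int
push_rounding_sketch (unsigned int nbytes)
{
  const unsigned int push_unit = 4;   /* hypothetical granularity  */
  return (nbytes + push_unit - 1) / push_unit * push_unit;
}
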
4222 /* If reading SIZE bytes from X will end up reading from
4223 Y, return the number of bytes that overlap. Return -1
4224 if there is no overlap, or -2 if we can't determine it
4225 (for example, when X and Y have different base registers). */
4227 static int
4228 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4230 rtx tmp = plus_constant (Pmode, x, size);
4231 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4233 if (!CONST_INT_P (sub))
4234 return -2;
4236 HOST_WIDE_INT val = INTVAL (sub);
4238 return IN_RANGE (val, 1, size) ? val : -1;
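
/* Editorial sketch, not part of GCC: with constant addresses the test
   above reduces to (x + size) - y.  A value in [1, size] means Y lies
   inside the bytes read from X, and is the number of overlapping
   bytes; e.g. x = sp, y = sp + 8, size = 16 gives 8.  Plain-integer
   version:  */

static long
memory_load_overlap_sketch (long x, long y, long size)
{
  long val = (x + size) - y;
  /* -1 mirrors "no overlap"; the real routine additionally returns -2
     when the difference is not a compile-time constant.  */
  return (val >= 1 && val <= size) ? val : -1;
}
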
4241 /* Generate code to push X onto the stack, assuming it has mode MODE and
4242 type TYPE.
4243 MODE is redundant except when X is a CONST_INT (since they don't
4244 carry mode info).
4245 SIZE is an rtx for the size of data to be copied (in bytes),
4246 needed only if X is BLKmode.
4247 Return true if successful. May return false if asked to push a
4248 partial argument during a sibcall optimization (as specified by
4249 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4250 to not overlap.
4252 ALIGN (in bits) is maximum alignment we can assume.
4254 If PARTIAL and REG are both nonzero, then copy that many of the first
4255 bytes of X into registers starting with REG, and push the rest of X.
4256 The amount of space pushed is decreased by PARTIAL bytes.
4257 REG must be a hard register in this case.
4258 If REG is zero but PARTIAL is not, take all other actions for an
4259 argument partially in registers, but do not actually load any
4260 registers.
4262 EXTRA is the amount in bytes of extra space to leave next to this arg.
4263 This is ignored if an argument block has already been allocated.
4265 On a machine that lacks real push insns, ARGS_ADDR is the address of
4266 the bottom of the argument block for this call. We use indexing off there
4267 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4268 argument block has not been preallocated.
4270 ARGS_SO_FAR is the size of args previously pushed for this call.
4272 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4273 for arguments passed in registers. If nonzero, it will be the number
4274 of bytes required. */
4276 bool
4277 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4278 unsigned int align, int partial, rtx reg, int extra,
4279 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4280 rtx alignment_pad, bool sibcall_p)
4282 rtx xinner;
4283 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4285 /* Decide where to pad the argument: `downward' for below,
4286 `upward' for above, or `none' for don't pad it.
4287 Default is below for small data on big-endian machines; else above. */
4288 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4290 /* Invert direction if stack is post-decrement.
4291 FIXME: why? */
4292 if (STACK_PUSH_CODE == POST_DEC)
4293 if (where_pad != none)
4294 where_pad = (where_pad == downward ? upward : downward);
4296 xinner = x;
4298 int nregs = partial / UNITS_PER_WORD;
4299 rtx *tmp_regs = NULL;
4300 int overlapping = 0;
4302 if (mode == BLKmode
4303 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4305 /* Copy a block into the stack, entirely or partially. */
4307 rtx temp;
4308 int used;
4309 int offset;
4310 int skip;
4312 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4313 used = partial - offset;
4315 if (mode != BLKmode)
4317 /* A value is to be stored in an insufficiently aligned
4318 stack slot; copy via a suitably aligned slot if
4319 necessary. */
4320 size = GEN_INT (GET_MODE_SIZE (mode));
4321 if (!MEM_P (xinner))
4323 temp = assign_temp (type, 1, 1);
4324 emit_move_insn (temp, xinner);
4325 xinner = temp;
4329 gcc_assert (size);
4331 /* USED is now the # of bytes we need not copy to the stack
4332 because registers will take care of them. */
4334 if (partial != 0)
4335 xinner = adjust_address (xinner, BLKmode, used);
4337 /* If the partial register-part of the arg counts in its stack size,
4338 skip the part of stack space corresponding to the registers.
4339 Otherwise, start copying to the beginning of the stack space,
4340 by setting SKIP to 0. */
4341 skip = (reg_parm_stack_space == 0) ? 0 : used;
4343 #ifdef PUSH_ROUNDING
4344 /* Do it with several push insns if that doesn't take lots of insns
4345 and if there is no difficulty with push insns that skip bytes
4346 on the stack for alignment purposes. */
4347 if (args_addr == 0
4348 && PUSH_ARGS
4349 && CONST_INT_P (size)
4350 && skip == 0
4351 && MEM_ALIGN (xinner) >= align
4352 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4353 /* Here we avoid the case of a structure whose weak alignment
4354 forces many pushes of a small amount of data,
4355 since such small pushes do rounding that causes trouble. */
4356 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4357 || align >= BIGGEST_ALIGNMENT
4358 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4359 == (align / BITS_PER_UNIT)))
4360 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4362 /* Push padding now if padding above and stack grows down,
4363 or if padding below and stack grows up.
4364 But if space already allocated, this has already been done. */
4365 if (extra && args_addr == 0
4366 && where_pad != none && where_pad != stack_direction)
4367 anti_adjust_stack (GEN_INT (extra));
4369 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4371 else
4372 #endif /* PUSH_ROUNDING */
4374 rtx target;
4376 /* Otherwise make space on the stack and copy the data
4377 to the address of that space. */
4379 /* Deduct words put into registers from the size we must copy. */
4380 if (partial != 0)
4382 if (CONST_INT_P (size))
4383 size = GEN_INT (INTVAL (size) - used);
4384 else
4385 size = expand_binop (GET_MODE (size), sub_optab, size,
4386 gen_int_mode (used, GET_MODE (size)),
4387 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4390 /* Get the address of the stack space.
4391 In this case, we do not deal with EXTRA separately.
4392 A single stack adjust will do. */
4393 if (! args_addr)
4395 temp = push_block (size, extra, where_pad == downward);
4396 extra = 0;
4398 else if (CONST_INT_P (args_so_far))
4399 temp = memory_address (BLKmode,
4400 plus_constant (Pmode, args_addr,
4401 skip + INTVAL (args_so_far)));
4402 else
4403 temp = memory_address (BLKmode,
4404 plus_constant (Pmode,
4405 gen_rtx_PLUS (Pmode,
4406 args_addr,
4407 args_so_far),
4408 skip));
4410 if (!ACCUMULATE_OUTGOING_ARGS)
4412 /* If the source is referenced relative to the stack pointer,
4413 copy it to another register to stabilize it. We do not need
4414 to do this if we know that we won't be changing sp. */
4416 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4417 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4418 temp = copy_to_reg (temp);
4421 target = gen_rtx_MEM (BLKmode, temp);
4423 /* We do *not* set_mem_attributes here, because incoming arguments
4424 may overlap with sibling call outgoing arguments and we cannot
4425 allow reordering of reads from function arguments with stores
4426 to outgoing arguments of sibling calls. We do, however, want
4427 to record the alignment of the stack slot. */
4428 /* ALIGN may well be better aligned than TYPE, e.g. due to
4429 PARM_BOUNDARY. Assume the caller isn't lying. */
4430 set_mem_align (target, align);
4432 /* If part should go in registers and pushing to that part would
4433 overwrite some of the values that need to go into regs, load the
4434 overlapping values into temporary pseudos to be moved into the hard
4435 regs at the end after the stack pushing has completed.
4436 We cannot load them directly into the hard regs here because
4437 they can be clobbered by the block move expansions.
4438 See PR 65358. */
4440 if (partial > 0 && reg != 0 && mode == BLKmode
4441 && GET_CODE (reg) != PARALLEL)
4443 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4444 if (overlapping > 0)
4446 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4447 overlapping /= UNITS_PER_WORD;
4449 tmp_regs = XALLOCAVEC (rtx, overlapping);
4451 for (int i = 0; i < overlapping; i++)
4452 tmp_regs[i] = gen_reg_rtx (word_mode);
4454 for (int i = 0; i < overlapping; i++)
4455 emit_move_insn (tmp_regs[i],
4456 operand_subword_force (target, i, mode));
4458 else if (overlapping == -1)
4459 overlapping = 0;
4460 /* Could not determine whether there is overlap.
4461 Fail the sibcall. */
4462 else
4464 overlapping = 0;
4465 if (sibcall_p)
4466 return false;
4469 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4472 else if (partial > 0)
4474 /* Scalar partly in registers. */
4476 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4477 int i;
4478 int not_stack;
4479 /* # bytes of start of argument
4480 that we must make space for but need not store. */
4481 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4482 int args_offset = INTVAL (args_so_far);
4483 int skip;
4485 /* Push padding now if padding above and stack grows down,
4486 or if padding below and stack grows up.
4487 But if space already allocated, this has already been done. */
4488 if (extra && args_addr == 0
4489 && where_pad != none && where_pad != stack_direction)
4490 anti_adjust_stack (GEN_INT (extra));
4492 /* If we make space by pushing it, we might as well push
4493 the real data. Otherwise, we can leave OFFSET nonzero
4494 and leave the space uninitialized. */
4495 if (args_addr == 0)
4496 offset = 0;
4498 /* Now NOT_STACK gets the number of words that we don't need to
4499 allocate on the stack. Convert OFFSET to words too. */
4500 not_stack = (partial - offset) / UNITS_PER_WORD;
4501 offset /= UNITS_PER_WORD;
4503 /* If the partial register-part of the arg counts in its stack size,
4504 skip the part of stack space corresponding to the registers.
4505 Otherwise, start copying to the beginning of the stack space,
4506 by setting SKIP to 0. */
4507 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4509 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4510 x = validize_mem (force_const_mem (mode, x));
4512 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4513 SUBREGs of such registers are not allowed. */
4514 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4515 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4516 x = copy_to_reg (x);
4518 /* Loop over all the words allocated on the stack for this arg. */
4519 /* We can do it by words, because any scalar bigger than a word
4520 has a size that is a multiple of a word. */
4521 for (i = size - 1; i >= not_stack; i--)
4522 if (i >= not_stack + offset)
4523 if (!emit_push_insn (operand_subword_force (x, i, mode),
4524 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4525 0, args_addr,
4526 GEN_INT (args_offset + ((i - not_stack + skip)
4527 * UNITS_PER_WORD)),
4528 reg_parm_stack_space, alignment_pad, sibcall_p))
4529 return false;
4531 else
4533 rtx addr;
4534 rtx dest;
4536 /* Push padding now if padding above and stack grows down,
4537 or if padding below and stack grows up.
4538 But if space already allocated, this has already been done. */
4539 if (extra && args_addr == 0
4540 && where_pad != none && where_pad != stack_direction)
4541 anti_adjust_stack (GEN_INT (extra));
4543 #ifdef PUSH_ROUNDING
4544 if (args_addr == 0 && PUSH_ARGS)
4545 emit_single_push_insn (mode, x, type);
4546 else
4547 #endif
4549 if (CONST_INT_P (args_so_far))
4550 addr
4551 = memory_address (mode,
4552 plus_constant (Pmode, args_addr,
4553 INTVAL (args_so_far)));
4554 else
4555 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4556 args_so_far));
4557 dest = gen_rtx_MEM (mode, addr);
4559 /* We do *not* set_mem_attributes here, because incoming arguments
4560 may overlap with sibling call outgoing arguments and we cannot
4561 allow reordering of reads from function arguments with stores
4562 to outgoing arguments of sibling calls. We do, however, want
4563 to record the alignment of the stack slot. */
4564 /* ALIGN may well be better aligned than TYPE, e.g. due to
4565 PARM_BOUNDARY. Assume the caller isn't lying. */
4566 set_mem_align (dest, align);
4568 emit_move_insn (dest, x);
4572 /* Move the partial arguments into the registers and any overlapping
4573 values that we moved into the pseudos in tmp_regs. */
4574 if (partial > 0 && reg != 0)
4576 /* Handle calls that pass values in multiple non-contiguous locations.
4577 The Irix 6 ABI has examples of this. */
4578 if (GET_CODE (reg) == PARALLEL)
4579 emit_group_load (reg, x, type, -1);
4580 else
4582 gcc_assert (partial % UNITS_PER_WORD == 0);
4583 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4585 for (int i = 0; i < overlapping; i++)
4586 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4587 + nregs - overlapping + i),
4588 tmp_regs[i]);
4593 if (extra && args_addr == 0 && where_pad == stack_direction)
4594 anti_adjust_stack (GEN_INT (extra));
4596 if (alignment_pad && args_addr == 0)
4597 anti_adjust_stack (alignment_pad);
4599 return true;
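
/* Editorial sketch, not part of GCC: when PARTIAL bytes of an argument
   travel in registers, the code above splits the argument into a
   register part and a stack part.  Hypothetical plain-integer version
   of that split (UNITS_PER_WORD and the parameter boundary in bytes
   are passed in rather than taken from the target macros):  */

static void
partial_arg_split_sketch (int partial, int units_per_word,
                          int parm_boundary_bytes,
                          int *nregs, int *not_stack, int *offset)
{
  *nregs = partial / units_per_word;        /* whole words in registers       */
  *offset = partial % parm_boundary_bytes;  /* bytes to reserve but not store */
  *not_stack = (partial - *offset) / units_per_word; /* words kept off stack  */
}
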
4602 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4603 operations. */
4605 static rtx
4606 get_subtarget (rtx x)
4608 return (optimize
4609 || x == 0
4610 /* Only registers can be subtargets. */
4611 || !REG_P (x)
4612 /* Don't use hard regs to avoid extending their life. */
4613 || REGNO (x) < FIRST_PSEUDO_REGISTER
4614 ? 0 : x);
4617 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4618 FIELD is a bitfield. Returns true if the optimization was successful,
4619 and there's nothing else to do. */
4621 static bool
4622 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4623 unsigned HOST_WIDE_INT bitpos,
4624 unsigned HOST_WIDE_INT bitregion_start,
4625 unsigned HOST_WIDE_INT bitregion_end,
4626 machine_mode mode1, rtx str_rtx,
4627 tree to, tree src, bool reverse)
4629 machine_mode str_mode = GET_MODE (str_rtx);
4630 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4631 tree op0, op1;
4632 rtx value, result;
4633 optab binop;
4634 gimple *srcstmt;
4635 enum tree_code code;
4637 if (mode1 != VOIDmode
4638 || bitsize >= BITS_PER_WORD
4639 || str_bitsize > BITS_PER_WORD
4640 || TREE_SIDE_EFFECTS (to)
4641 || TREE_THIS_VOLATILE (to))
4642 return false;
4644 STRIP_NOPS (src);
4645 if (TREE_CODE (src) != SSA_NAME)
4646 return false;
4647 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4648 return false;
4650 srcstmt = get_gimple_for_ssa_name (src);
4651 if (!srcstmt
4652 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4653 return false;
4655 code = gimple_assign_rhs_code (srcstmt);
4657 op0 = gimple_assign_rhs1 (srcstmt);
4659 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4660 to find its initialization. Hopefully the initialization will
4661 be from a bitfield load. */
4662 if (TREE_CODE (op0) == SSA_NAME)
4664 gimple *op0stmt = get_gimple_for_ssa_name (op0);
4666 /* We want to eventually have OP0 be the same as TO, which
4667 should be a bitfield. */
4668 if (!op0stmt
4669 || !is_gimple_assign (op0stmt)
4670 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4671 return false;
4672 op0 = gimple_assign_rhs1 (op0stmt);
4675 op1 = gimple_assign_rhs2 (srcstmt);
4677 if (!operand_equal_p (to, op0, 0))
4678 return false;
4680 if (MEM_P (str_rtx))
4682 unsigned HOST_WIDE_INT offset1;
4684 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4685 str_mode = word_mode;
4686 str_mode = get_best_mode (bitsize, bitpos,
4687 bitregion_start, bitregion_end,
4688 MEM_ALIGN (str_rtx), str_mode, 0);
4689 if (str_mode == VOIDmode)
4690 return false;
4691 str_bitsize = GET_MODE_BITSIZE (str_mode);
4693 offset1 = bitpos;
4694 bitpos %= str_bitsize;
4695 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4696 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4698 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4699 return false;
4700 else
4701 gcc_assert (!reverse);
4703 /* If the bit field covers the whole REG/MEM, store_field
4704 will likely generate better code. */
4705 if (bitsize >= str_bitsize)
4706 return false;
4708 /* We can't handle fields split across multiple entities. */
4709 if (bitpos + bitsize > str_bitsize)
4710 return false;
4712 if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4713 bitpos = str_bitsize - bitpos - bitsize;
4715 switch (code)
4717 case PLUS_EXPR:
4718 case MINUS_EXPR:
4719 /* For now, just optimize the case of the topmost bitfield,
4720 where we don't need to do any masking, and also
4721 1-bit bitfields, where xor can be used.
4722 We might win by one instruction for the other bitfields
4723 too if insv/extv instructions aren't used, so that
4724 can be added later. */
4725 if ((reverse || bitpos + bitsize != str_bitsize)
4726 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4727 break;
4729 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4730 value = convert_modes (str_mode,
4731 TYPE_MODE (TREE_TYPE (op1)), value,
4732 TYPE_UNSIGNED (TREE_TYPE (op1)));
4734 /* We may be accessing data outside the field, which means
4735 we can alias adjacent data. */
4736 if (MEM_P (str_rtx))
4738 str_rtx = shallow_copy_rtx (str_rtx);
4739 set_mem_alias_set (str_rtx, 0);
4740 set_mem_expr (str_rtx, 0);
4743 if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4745 value = expand_and (str_mode, value, const1_rtx, NULL);
4746 binop = xor_optab;
4748 else
4749 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4751 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4752 if (reverse)
4753 value = flip_storage_order (str_mode, value);
4754 result = expand_binop (str_mode, binop, str_rtx,
4755 value, str_rtx, 1, OPTAB_WIDEN);
4756 if (result != str_rtx)
4757 emit_move_insn (str_rtx, result);
4758 return true;
4760 case BIT_IOR_EXPR:
4761 case BIT_XOR_EXPR:
4762 if (TREE_CODE (op1) != INTEGER_CST)
4763 break;
4764 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4765 value = convert_modes (str_mode,
4766 TYPE_MODE (TREE_TYPE (op1)), value,
4767 TYPE_UNSIGNED (TREE_TYPE (op1)));
4769 /* We may be accessing data outside the field, which means
4770 we can alias adjacent data. */
4771 if (MEM_P (str_rtx))
4773 str_rtx = shallow_copy_rtx (str_rtx);
4774 set_mem_alias_set (str_rtx, 0);
4775 set_mem_expr (str_rtx, 0);
4778 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4779 if (bitpos + bitsize != str_bitsize)
4781 rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
4782 str_mode);
4783 value = expand_and (str_mode, value, mask, NULL_RTX);
4785 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4786 if (reverse)
4787 value = flip_storage_order (str_mode, value);
4788 result = expand_binop (str_mode, binop, str_rtx,
4789 value, str_rtx, 1, OPTAB_WIDEN);
4790 if (result != str_rtx)
4791 emit_move_insn (str_rtx, result);
4792 return true;
4794 default:
4795 break;
4798 return false;
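
/* Editorial illustration, not part of GCC: the two shapes the function
   above handles.  An add into the topmost bitfield of a word needs no
   masking because the carry simply falls off the most significant end,
   and a +/- 1 on a one-bit field degenerates to an xor of that bit.
   Hypothetical stand-alone versions operating on a host word:  */

static unsigned int
topmost_field_add_sketch (unsigned int word, unsigned int val,
                          unsigned int bitpos)
{
  /* The field occupies bits [bitpos, width); anything carried past the
     top of the word is discarded, exactly as it would be in the field.  */
  return word + (val << bitpos);
}

static unsigned int
one_bit_field_flip_sketch (unsigned int word, unsigned int bitpos)
{
  return word ^ (1u << bitpos);
}
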
4801 /* In the C++ memory model, consecutive bit fields in a structure are
4802 considered one memory location.
4804 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4805 returns the bit range of consecutive bits in which this COMPONENT_REF
4806 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4807 and *OFFSET may be adjusted in the process.
4809 If the access does not need to be restricted, 0 is returned in both
4810 *BITSTART and *BITEND. */
4812 void
4813 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4814 unsigned HOST_WIDE_INT *bitend,
4815 tree exp,
4816 HOST_WIDE_INT *bitpos,
4817 tree *offset)
4819 HOST_WIDE_INT bitoffset;
4820 tree field, repr;
4822 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4824 field = TREE_OPERAND (exp, 1);
4825 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4826 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4827 need to limit the range we can access. */
4828 if (!repr)
4830 *bitstart = *bitend = 0;
4831 return;
4834 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4835 part of a larger bit field, then the representative does not serve any
4836 useful purpose. This can occur in Ada. */
4837 if (handled_component_p (TREE_OPERAND (exp, 0)))
4839 machine_mode rmode;
4840 HOST_WIDE_INT rbitsize, rbitpos;
4841 tree roffset;
4842 int unsignedp, reversep, volatilep = 0;
4843 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4844 &roffset, &rmode, &unsignedp, &reversep,
4845 &volatilep);
4846 if ((rbitpos % BITS_PER_UNIT) != 0)
4848 *bitstart = *bitend = 0;
4849 return;
4853 /* Compute the adjustment to bitpos from the offset of the field
4854 relative to the representative. DECL_FIELD_OFFSET of field and
4855 repr are the same by construction if they are not constants,
4856 see finish_bitfield_layout. */
4857 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4858 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4859 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4860 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4861 else
4862 bitoffset = 0;
4863 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4864 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4866 /* If the adjustment is larger than bitpos, we would have a negative bit
4867 position for the lower bound and this may wreak havoc later. Adjust
4868 offset and bitpos to make the lower bound non-negative in that case. */
4869 if (bitoffset > *bitpos)
4871 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4872 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4874 *bitpos += adjust;
4875 if (*offset == NULL_TREE)
4876 *offset = size_int (-adjust / BITS_PER_UNIT);
4877 else
4878 *offset
4879 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4880 *bitstart = 0;
4882 else
4883 *bitstart = *bitpos - bitoffset;
4885 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
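
/* Editorial sketch, not part of GCC: with constant offsets the
   computation above boils down to the following, where the *_off
   arguments stand in for the DECL_FIELD_OFFSET / DECL_FIELD_BIT_OFFSET
   values of the field and of its representative (hypothetical plain
   integers, BITS_PER_UNIT taken as 8, and the negative-bitpos
   adjustment ignored):  */

static void
bit_range_sketch (unsigned long bitpos, unsigned long repr_size_bits,
                  unsigned long field_byte_off, unsigned long repr_byte_off,
                  unsigned long field_bit_off, unsigned long repr_bit_off,
                  unsigned long *bitstart, unsigned long *bitend)
{
  unsigned long bitoffset = (field_byte_off - repr_byte_off) * 8
                            + (field_bit_off - repr_bit_off);
  *bitstart = bitpos - bitoffset;           /* first bit of the representative */
  *bitend = *bitstart + repr_size_bits - 1; /* last bit of the covered region  */
}
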
4888 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4889 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4890 DECL_RTL was not set yet, return NORTL. */
4892 static inline bool
4893 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4895 if (TREE_CODE (addr) != ADDR_EXPR)
4896 return false;
4898 tree base = TREE_OPERAND (addr, 0);
4900 if (!DECL_P (base)
4901 || TREE_ADDRESSABLE (base)
4902 || DECL_MODE (base) == BLKmode)
4903 return false;
4905 if (!DECL_RTL_SET_P (base))
4906 return nortl;
4908 return (!MEM_P (DECL_RTL (base)));
4911 /* Returns true if the MEM_REF REF refers to an object that does not
4912 reside in memory and has non-BLKmode. */
4914 static inline bool
4915 mem_ref_refers_to_non_mem_p (tree ref)
4917 tree base = TREE_OPERAND (ref, 0);
4918 return addr_expr_of_non_mem_decl_p_1 (base, false);
4921 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4922 is true, try generating a nontemporal store. */
4924 void
4925 expand_assignment (tree to, tree from, bool nontemporal)
4927 rtx to_rtx = 0;
4928 rtx result;
4929 machine_mode mode;
4930 unsigned int align;
4931 enum insn_code icode;
4933 /* Don't crash if the lhs of the assignment was erroneous. */
4934 if (TREE_CODE (to) == ERROR_MARK)
4936 expand_normal (from);
4937 return;
4940 /* Optimize away no-op moves without side-effects. */
4941 if (operand_equal_p (to, from, 0))
4942 return;
4944 /* Handle misaligned stores. */
4945 mode = TYPE_MODE (TREE_TYPE (to));
4946 if ((TREE_CODE (to) == MEM_REF
4947 || TREE_CODE (to) == TARGET_MEM_REF)
4948 && mode != BLKmode
4949 && !mem_ref_refers_to_non_mem_p (to)
4950 && ((align = get_object_alignment (to))
4951 < GET_MODE_ALIGNMENT (mode))
4952 && (((icode = optab_handler (movmisalign_optab, mode))
4953 != CODE_FOR_nothing)
4954 || SLOW_UNALIGNED_ACCESS (mode, align)))
4956 rtx reg, mem;
4958 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4959 reg = force_not_mem (reg);
4960 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4961 if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
4962 reg = flip_storage_order (mode, reg);
4964 if (icode != CODE_FOR_nothing)
4966 struct expand_operand ops[2];
4968 create_fixed_operand (&ops[0], mem);
4969 create_input_operand (&ops[1], reg, mode);
4970 /* The movmisalign<mode> pattern cannot fail, else the assignment
4971 would silently be omitted. */
4972 expand_insn (icode, 2, ops);
4974 else
4975 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
4976 false);
4977 return;
4980 /* Assignment of a structure component needs special treatment
4981 if the structure component's rtx is not simply a MEM.
4982 Assignment of an array element at a constant index, and assignment of
4983 an array element in an unaligned packed structure field, have the same
4984 problem. Same for (partially) storing into a non-memory object. */
4985 if (handled_component_p (to)
4986 || (TREE_CODE (to) == MEM_REF
4987 && (REF_REVERSE_STORAGE_ORDER (to)
4988 || mem_ref_refers_to_non_mem_p (to)))
4989 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4991 machine_mode mode1;
4992 HOST_WIDE_INT bitsize, bitpos;
4993 unsigned HOST_WIDE_INT bitregion_start = 0;
4994 unsigned HOST_WIDE_INT bitregion_end = 0;
4995 tree offset;
4996 int unsignedp, reversep, volatilep = 0;
4997 tree tem;
4999 push_temp_slots ();
5000 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
5001 &unsignedp, &reversep, &volatilep);
5003 /* Make sure bitpos is not negative; it can wreak havoc later. */
5004 if (bitpos < 0)
5006 gcc_assert (offset == NULL_TREE);
5007 offset = size_int (bitpos >> LOG2_BITS_PER_UNIT);
5008 bitpos &= BITS_PER_UNIT - 1;
5011 if (TREE_CODE (to) == COMPONENT_REF
5012 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
5013 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
5014 /* The C++ memory model naturally applies to byte-aligned fields.
5015 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
5016 BITSIZE are not byte-aligned, there is no need to limit the range
5017 we can access. This can occur with packed structures in Ada. */
5018 else if (bitsize > 0
5019 && bitsize % BITS_PER_UNIT == 0
5020 && bitpos % BITS_PER_UNIT == 0)
5022 bitregion_start = bitpos;
5023 bitregion_end = bitpos + bitsize - 1;
5026 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
5028 /* If the field has a mode, we want to access it in the
5029 field's mode, not the computed mode.
5030 If a MEM has VOIDmode (external with incomplete type),
5031 use BLKmode for it instead. */
5032 if (MEM_P (to_rtx))
5034 if (mode1 != VOIDmode)
5035 to_rtx = adjust_address (to_rtx, mode1, 0);
5036 else if (GET_MODE (to_rtx) == VOIDmode)
5037 to_rtx = adjust_address (to_rtx, BLKmode, 0);
5040 if (offset != 0)
5042 machine_mode address_mode;
5043 rtx offset_rtx;
5045 if (!MEM_P (to_rtx))
5047 /* We can get constant negative offsets into arrays with broken
5048 user code. Translate this to a trap instead of ICEing. */
5049 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
5050 expand_builtin_trap ();
5051 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
5054 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
5055 address_mode = get_address_mode (to_rtx);
5056 if (GET_MODE (offset_rtx) != address_mode)
5058 /* We cannot be sure that the RTL in offset_rtx is valid outside
5059 of a memory address context, so force it into a register
5060 before attempting to convert it to the desired mode. */
5061 offset_rtx = force_operand (offset_rtx, NULL_RTX);
5062 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5065 /* If we have an expression in OFFSET_RTX and a non-zero
5066 byte offset in BITPOS, adding the byte offset before the
5067 OFFSET_RTX results in better intermediate code, which makes
5068 later rtl optimization passes perform better.
5070 We prefer intermediate code like this:
5072 r124:DI=r123:DI+0x18
5073 [r124:DI]=r121:DI
5075 ... instead of ...
5077 r124:DI=r123:DI+0x10
5078 [r124:DI+0x8]=r121:DI
5080 This is only done for aligned data values, as these can
5081 be expected to result in single move instructions. */
5082 if (mode1 != VOIDmode
5083 && bitpos != 0
5084 && bitsize > 0
5085 && (bitpos % bitsize) == 0
5086 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
5087 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
5089 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
5090 bitregion_start = 0;
5091 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
5092 bitregion_end -= bitpos;
5093 bitpos = 0;
5096 to_rtx = offset_address (to_rtx, offset_rtx,
5097 highest_pow2_factor_for_target (to,
5098 offset));
5101 /* No action is needed if the target is not a memory and the field
5102 lies completely outside that target. This can occur if the source
5103 code contains an out-of-bounds access to a small array. */
5104 if (!MEM_P (to_rtx)
5105 && GET_MODE (to_rtx) != BLKmode
5106 && (unsigned HOST_WIDE_INT) bitpos
5107 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
5109 expand_normal (from);
5110 result = NULL;
5112 /* Handle expand_expr of a complex value returning a CONCAT. */
5113 else if (GET_CODE (to_rtx) == CONCAT)
5115 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
5116 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
5117 && bitpos == 0
5118 && bitsize == mode_bitsize)
5119 result = store_expr (from, to_rtx, false, nontemporal, reversep);
5120 else if (bitsize == mode_bitsize / 2
5121 && (bitpos == 0 || bitpos == mode_bitsize / 2))
5122 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
5123 nontemporal, reversep);
5124 else if (bitpos + bitsize <= mode_bitsize / 2)
5125 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
5126 bitregion_start, bitregion_end,
5127 mode1, from, get_alias_set (to),
5128 nontemporal, reversep);
5129 else if (bitpos >= mode_bitsize / 2)
5130 result = store_field (XEXP (to_rtx, 1), bitsize,
5131 bitpos - mode_bitsize / 2,
5132 bitregion_start, bitregion_end,
5133 mode1, from, get_alias_set (to),
5134 nontemporal, reversep);
5135 else if (bitpos == 0 && bitsize == mode_bitsize)
5137 rtx from_rtx;
5138 result = expand_normal (from);
5139 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
5140 TYPE_MODE (TREE_TYPE (from)), 0);
5141 emit_move_insn (XEXP (to_rtx, 0),
5142 read_complex_part (from_rtx, false));
5143 emit_move_insn (XEXP (to_rtx, 1),
5144 read_complex_part (from_rtx, true));
5146 else
5148 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5149 GET_MODE_SIZE (GET_MODE (to_rtx)));
5150 write_complex_part (temp, XEXP (to_rtx, 0), false);
5151 write_complex_part (temp, XEXP (to_rtx, 1), true);
5152 result = store_field (temp, bitsize, bitpos,
5153 bitregion_start, bitregion_end,
5154 mode1, from, get_alias_set (to),
5155 nontemporal, reversep);
5156 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
5157 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5160 else
5162 if (MEM_P (to_rtx))
5164 /* If the field is at offset zero, we could have been given the
5165 DECL_RTL of the parent struct. Don't munge it. */
5166 to_rtx = shallow_copy_rtx (to_rtx);
5167 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5168 if (volatilep)
5169 MEM_VOLATILE_P (to_rtx) = 1;
5172 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5173 bitregion_start, bitregion_end,
5174 mode1, to_rtx, to, from,
5175 reversep))
5176 result = NULL;
5177 else
5178 result = store_field (to_rtx, bitsize, bitpos,
5179 bitregion_start, bitregion_end,
5180 mode1, from, get_alias_set (to),
5181 nontemporal, reversep);
5184 if (result)
5185 preserve_temp_slots (result);
5186 pop_temp_slots ();
5187 return;
5190 /* If the rhs is a function call and its value is not an aggregate,
5191 call the function before we start to compute the lhs.
5192 This is needed for correct code for cases such as
5193 val = setjmp (buf) on machines where reference to val
5194 requires loading up part of an address in a separate insn.
5196 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5197 since it might be a promoted variable where the zero- or sign- extension
5198 needs to be done. Handling this in the normal way is safe because no
5199 computation is done before the call. The same is true for SSA names. */
5200 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5201 && COMPLETE_TYPE_P (TREE_TYPE (from))
5202 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5203 && ! (((VAR_P (to)
5204 || TREE_CODE (to) == PARM_DECL
5205 || TREE_CODE (to) == RESULT_DECL)
5206 && REG_P (DECL_RTL (to)))
5207 || TREE_CODE (to) == SSA_NAME))
5209 rtx value;
5210 rtx bounds;
5212 push_temp_slots ();
5213 value = expand_normal (from);
5215 /* Split value and bounds to store them separately. */
5216 chkp_split_slot (value, &value, &bounds);
5218 if (to_rtx == 0)
5219 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5221 /* Handle calls that return values in multiple non-contiguous locations.
5222 The Irix 6 ABI has examples of this. */
5223 if (GET_CODE (to_rtx) == PARALLEL)
5225 if (GET_CODE (value) == PARALLEL)
5226 emit_group_move (to_rtx, value);
5227 else
5228 emit_group_load (to_rtx, value, TREE_TYPE (from),
5229 int_size_in_bytes (TREE_TYPE (from)));
5231 else if (GET_CODE (value) == PARALLEL)
5232 emit_group_store (to_rtx, value, TREE_TYPE (from),
5233 int_size_in_bytes (TREE_TYPE (from)));
5234 else if (GET_MODE (to_rtx) == BLKmode)
5236 /* Handle calls that return BLKmode values in registers. */
5237 if (REG_P (value))
5238 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5239 else
5240 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5242 else
5244 if (POINTER_TYPE_P (TREE_TYPE (to)))
5245 value = convert_memory_address_addr_space
5246 (GET_MODE (to_rtx), value,
5247 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5249 emit_move_insn (to_rtx, value);
5252 /* Store bounds if required. */
5253 if (bounds
5254 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5256 gcc_assert (MEM_P (to_rtx));
5257 chkp_emit_bounds_store (bounds, value, to_rtx);
5260 preserve_temp_slots (to_rtx);
5261 pop_temp_slots ();
5262 return;
5265 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5266 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5268 /* Don't move directly into a return register. */
5269 if (TREE_CODE (to) == RESULT_DECL
5270 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5272 rtx temp;
5274 push_temp_slots ();
5276 /* If the source is itself a return value, it still is in a pseudo at
5277 this point so we can move it back to the return register directly. */
5278 if (REG_P (to_rtx)
5279 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5280 && TREE_CODE (from) != CALL_EXPR)
5281 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5282 else
5283 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5285 /* Handle calls that return values in multiple non-contiguous locations.
5286 The Irix 6 ABI has examples of this. */
5287 if (GET_CODE (to_rtx) == PARALLEL)
5289 if (GET_CODE (temp) == PARALLEL)
5290 emit_group_move (to_rtx, temp);
5291 else
5292 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5293 int_size_in_bytes (TREE_TYPE (from)));
5295 else if (temp)
5296 emit_move_insn (to_rtx, temp);
5298 preserve_temp_slots (to_rtx);
5299 pop_temp_slots ();
5300 return;
5303 /* In case we are returning the contents of an object which overlaps
5304 the place the value is being stored, use a safe function when copying
5305 a value through a pointer into a structure value return block. */
5306 if (TREE_CODE (to) == RESULT_DECL
5307 && TREE_CODE (from) == INDIRECT_REF
5308 && ADDR_SPACE_GENERIC_P
5309 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5310 && refs_may_alias_p (to, from)
5311 && cfun->returns_struct
5312 && !cfun->returns_pcc_struct)
5314 rtx from_rtx, size;
5316 push_temp_slots ();
5317 size = expr_size (from);
5318 from_rtx = expand_normal (from);
5320 emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);
5322 preserve_temp_slots (to_rtx);
5323 pop_temp_slots ();
5324 return;
5327 /* Compute FROM and store the value in the rtx we got. */
5329 push_temp_slots ();
5330 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
5331 preserve_temp_slots (result);
5332 pop_temp_slots ();
5333 return;
5336 /* Emits a nontemporal store insn that moves FROM to TO. Returns true if this
5337 succeeded, false otherwise. */
5339 bool
5340 emit_storent_insn (rtx to, rtx from)
5342 struct expand_operand ops[2];
5343 machine_mode mode = GET_MODE (to);
5344 enum insn_code code = optab_handler (storent_optab, mode);
5346 if (code == CODE_FOR_nothing)
5347 return false;
5349 create_fixed_operand (&ops[0], to);
5350 create_input_operand (&ops[1], from, mode);
5351 return maybe_expand_insn (code, 2, ops);
5354 /* Generate code for computing expression EXP,
5355 and storing the value into TARGET.
5357 If the mode is BLKmode then we may return TARGET itself.
5358 It turns out that in BLKmode it doesn't cause a problem,
5359 because C has no operators that could combine two different
5360 assignments into the same BLKmode object with different values
5361 with no sequence point. Will other languages need this to
5362 be more thorough?
5364 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5365 stack, and block moves may need to be treated specially.
5367 If NONTEMPORAL is true, try using a nontemporal store instruction.
5369 If REVERSE is true, the store is to be done in reverse order.
5371 If BTARGET is not NULL then computed bounds of EXP are
5372 associated with BTARGET. */
5375 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5376 bool nontemporal, bool reverse, tree btarget)
5378 rtx temp;
5379 rtx alt_rtl = NULL_RTX;
5380 location_t loc = curr_insn_location ();
5382 if (VOID_TYPE_P (TREE_TYPE (exp)))
5384 /* C++ can generate ?: expressions with a throw expression in one
5385 branch and an rvalue in the other. Here, we resolve attempts to
5386 store the throw expression's nonexistent result. */
5387 gcc_assert (!call_param_p);
5388 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5389 return NULL_RTX;
5391 if (TREE_CODE (exp) == COMPOUND_EXPR)
5393 /* Perform first part of compound expression, then assign from second
5394 part. */
5395 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5396 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5397 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5398 call_param_p, nontemporal, reverse,
5399 btarget);
5401 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5403 /* For a conditional expression, get a safe form of the target. Then
5404 test the condition, doing the appropriate assignment on either
5405 side. This avoids the creation of unnecessary temporaries.
5406 For non-BLKmode, it is more efficient not to do this. */
5408 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5410 do_pending_stack_adjust ();
5411 NO_DEFER_POP;
5412 jumpifnot (TREE_OPERAND (exp, 0), lab1,
5413 profile_probability::uninitialized ());
5414 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5415 nontemporal, reverse, btarget);
5416 emit_jump_insn (targetm.gen_jump (lab2));
5417 emit_barrier ();
5418 emit_label (lab1);
5419 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5420 nontemporal, reverse, btarget);
5421 emit_label (lab2);
5422 OK_DEFER_POP;
5424 return NULL_RTX;
5426 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5427 /* If this is a scalar in a register that is stored in a wider mode
5428 than the declared mode, compute the result into its declared mode
5429 and then convert to the wider mode. Our value is the computed
5430 expression. */
5432 rtx inner_target = 0;
5434 /* We can do the conversion inside EXP, which will often result
5435 in some optimizations. Do the conversion in two steps: first
5436 change the signedness, if needed, then do the extension. But don't
5437 do this if the type of EXP is a subtype of something else
5438 since then the conversion might involve more than just
5439 converting modes. */
5440 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5441 && TREE_TYPE (TREE_TYPE (exp)) == 0
5442 && GET_MODE_PRECISION (GET_MODE (target))
5443 == TYPE_PRECISION (TREE_TYPE (exp)))
5445 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5446 TYPE_UNSIGNED (TREE_TYPE (exp))))
5448 /* Some types, e.g. Fortran's logical*4, won't have a signed
5449 version, so use the mode instead. */
5450 tree ntype
5451 = (signed_or_unsigned_type_for
5452 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5453 if (ntype == NULL)
5454 ntype = lang_hooks.types.type_for_mode
5455 (TYPE_MODE (TREE_TYPE (exp)),
5456 SUBREG_PROMOTED_SIGN (target));
5458 exp = fold_convert_loc (loc, ntype, exp);
5461 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5462 (GET_MODE (SUBREG_REG (target)),
5463 SUBREG_PROMOTED_SIGN (target)),
5464 exp);
5466 inner_target = SUBREG_REG (target);
5469 temp = expand_expr (exp, inner_target, VOIDmode,
5470 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5472 /* Handle bounds returned by call. */
5473 if (TREE_CODE (exp) == CALL_EXPR)
5475 rtx bounds;
5476 chkp_split_slot (temp, &temp, &bounds);
5477 if (bounds && btarget)
5479 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5480 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5481 chkp_set_rtl_bounds (btarget, tmp);
5485 /* If TEMP is a VOIDmode constant, use convert_modes to make
5486 sure that we properly convert it. */
5487 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5489 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5490 temp, SUBREG_PROMOTED_SIGN (target));
5491 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5492 GET_MODE (target), temp,
5493 SUBREG_PROMOTED_SIGN (target));
5496 convert_move (SUBREG_REG (target), temp,
5497 SUBREG_PROMOTED_SIGN (target));
5499 return NULL_RTX;
5501 else if ((TREE_CODE (exp) == STRING_CST
5502 || (TREE_CODE (exp) == MEM_REF
5503 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5504 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5505 == STRING_CST
5506 && integer_zerop (TREE_OPERAND (exp, 1))))
5507 && !nontemporal && !call_param_p
5508 && MEM_P (target))
5510 /* Optimize initialization of an array with a STRING_CST. */
5511 HOST_WIDE_INT exp_len, str_copy_len;
5512 rtx dest_mem;
5513 tree str = TREE_CODE (exp) == STRING_CST
5514 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5516 exp_len = int_expr_size (exp);
5517 if (exp_len <= 0)
5518 goto normal_expr;
5520 if (TREE_STRING_LENGTH (str) <= 0)
5521 goto normal_expr;
5523 str_copy_len = strlen (TREE_STRING_POINTER (str));
5524 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5525 goto normal_expr;
5527 str_copy_len = TREE_STRING_LENGTH (str);
5528 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5529 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5531 str_copy_len += STORE_MAX_PIECES - 1;
5532 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5534 str_copy_len = MIN (str_copy_len, exp_len);
5535 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5536 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5537 MEM_ALIGN (target), false))
5538 goto normal_expr;
5540 dest_mem = target;
5542 dest_mem = store_by_pieces (dest_mem,
5543 str_copy_len, builtin_strncpy_read_str,
5544 CONST_CAST (char *,
5545 TREE_STRING_POINTER (str)),
5546 MEM_ALIGN (target), false,
5547 exp_len > str_copy_len ? 1 : 0);
5548 if (exp_len > str_copy_len)
5549 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5550 GEN_INT (exp_len - str_copy_len),
5551 BLOCK_OP_NORMAL);
5552 return NULL_RTX;
5554 else
5556 rtx tmp_target;
5558 normal_expr:
5559 /* If we want to use a nontemporal or a reverse order store, force the
5560 value into a register first. */
5561 tmp_target = nontemporal || reverse ? NULL_RTX : target;
5562 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5563 (call_param_p
5564 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5565 &alt_rtl, false);
5567 /* Handle bounds returned by call. */
5568 if (TREE_CODE (exp) == CALL_EXPR)
5570 rtx bounds;
5571 chkp_split_slot (temp, &temp, &bounds);
5572 if (bounds && btarget)
5574 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5575 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5576 chkp_set_rtl_bounds (btarget, tmp);
5581 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5582 the same as that of TARGET, adjust the constant. This is needed, for
5583 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5584 only a word-sized value. */
5585 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5586 && TREE_CODE (exp) != ERROR_MARK
5587 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5588 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5589 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5591 /* If value was not generated in the target, store it there.
5592 Convert the value to TARGET's type first if necessary and emit the
5593 pending incrementations that have been queued when expanding EXP.
5594 Note that we cannot emit the whole queue blindly because this will
5595 effectively disable the POST_INC optimization later.
5597 If TEMP and TARGET compare equal according to rtx_equal_p, but
5598 one or both of them are volatile memory refs, we have to distinguish
5599 two cases:
5600 - expand_expr has used TARGET. In this case, we must not generate
5601 another copy. This can be detected by TARGET being equal according
5602 to == .
5603 - expand_expr has not used TARGET - that means that the source just
5604 happens to have the same RTX form. Since temp will have been created
5605 by expand_expr, it will compare unequal according to == .
5606 We must generate a copy in this case, to reach the correct number
5607 of volatile memory references. */
5609 if ((! rtx_equal_p (temp, target)
5610 || (temp != target && (side_effects_p (temp)
5611 || side_effects_p (target))))
5612 && TREE_CODE (exp) != ERROR_MARK
5613 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5614 but TARGET is not a valid memory reference, TEMP will differ
5615 from TARGET although it is really the same location. */
5616 && !(alt_rtl
5617 && rtx_equal_p (alt_rtl, target)
5618 && !side_effects_p (alt_rtl)
5619 && !side_effects_p (target))
5620 /* If there's nothing to copy, don't bother. Don't call
5621 expr_size unless necessary, because some front ends' (C++)
5622 expr_size hook must not be given objects that are not
5623 supposed to be bit-copied or bit-initialized. */
5624 && expr_size (exp) != const0_rtx)
5626 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5628 if (GET_MODE (target) == BLKmode)
5630 /* Handle calls that return BLKmode values in registers. */
5631 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5632 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5633 else
5634 store_bit_field (target,
5635 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5636 0, 0, 0, GET_MODE (temp), temp, reverse);
5638 else
5639 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5642 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5644 /* Handle copying a string constant into an array. The string
5645 constant may be shorter than the array. So copy just the string's
5646 actual length, and clear the rest. First get the size of the data
5647 type of the string, which is actually the size of the target. */
5648 rtx size = expr_size (exp);
5650 if (CONST_INT_P (size)
5651 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5652 emit_block_move (target, temp, size,
5653 (call_param_p
5654 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5655 else
5657 machine_mode pointer_mode
5658 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5659 machine_mode address_mode = get_address_mode (target);
5661 /* Compute the size of the data to copy from the string. */
5662 tree copy_size
5663 = size_binop_loc (loc, MIN_EXPR,
5664 make_tree (sizetype, size),
5665 size_int (TREE_STRING_LENGTH (exp)));
5666 rtx copy_size_rtx
5667 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5668 (call_param_p
5669 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5670 rtx_code_label *label = 0;
5672 /* Copy that much. */
5673 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5674 TYPE_UNSIGNED (sizetype));
5675 emit_block_move (target, temp, copy_size_rtx,
5676 (call_param_p
5677 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5679 /* Figure out how much is left in TARGET that we have to clear.
5680 Do all calculations in pointer_mode. */
5681 if (CONST_INT_P (copy_size_rtx))
5683 size = plus_constant (address_mode, size,
5684 -INTVAL (copy_size_rtx));
5685 target = adjust_address (target, BLKmode,
5686 INTVAL (copy_size_rtx));
5688 else
5690 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5691 copy_size_rtx, NULL_RTX, 0,
5692 OPTAB_LIB_WIDEN);
5694 if (GET_MODE (copy_size_rtx) != address_mode)
5695 copy_size_rtx = convert_to_mode (address_mode,
5696 copy_size_rtx,
5697 TYPE_UNSIGNED (sizetype));
5699 target = offset_address (target, copy_size_rtx,
5700 highest_pow2_factor (copy_size));
5701 label = gen_label_rtx ();
5702 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5703 GET_MODE (size), 0, label);
5706 if (size != const0_rtx)
5707 clear_storage (target, size, BLOCK_OP_NORMAL);
5709 if (label)
5710 emit_label (label);
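      /* In the variable-size case above, the remaining size is computed at
         run time, and the compare-and-jump branches around the clear_storage
         call when that remainder drops below zero, so only the uncopied tail
         of TARGET is cleared.  */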
5713 /* Handle calls that return values in multiple non-contiguous locations.
5714 The Irix 6 ABI has examples of this. */
5715 else if (GET_CODE (target) == PARALLEL)
5717 if (GET_CODE (temp) == PARALLEL)
5718 emit_group_move (target, temp);
5719 else
5720 emit_group_load (target, temp, TREE_TYPE (exp),
5721 int_size_in_bytes (TREE_TYPE (exp)));
5723 else if (GET_CODE (temp) == PARALLEL)
5724 emit_group_store (target, temp, TREE_TYPE (exp),
5725 int_size_in_bytes (TREE_TYPE (exp)));
5726 else if (GET_MODE (temp) == BLKmode)
5727 emit_block_move (target, temp, expr_size (exp),
5728 (call_param_p
5729 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5730 /* If we emit a nontemporal store, there is nothing else to do. */
5731 else if (nontemporal && emit_storent_insn (target, temp))
5733 else
5735 if (reverse)
5736 temp = flip_storage_order (GET_MODE (target), temp);
5737 temp = force_operand (temp, target);
5738 if (temp != target)
5739 emit_move_insn (target, temp);
5743 return NULL_RTX;
5746 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5747 rtx
5748 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
5749 bool reverse)
5751 return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
5752 reverse, NULL);
5755 /* Return true if field F of structure TYPE is a flexible array. */
5757 static bool
5758 flexible_array_member_p (const_tree f, const_tree type)
5760 const_tree tf;
5762 tf = TREE_TYPE (f);
5763 return (DECL_CHAIN (f) == NULL
5764 && TREE_CODE (tf) == ARRAY_TYPE
5765 && TYPE_DOMAIN (tf)
5766 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5767 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5768 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5769 && int_size_in_bytes (type) >= 0);
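/* For example, in 'struct S { int n; char data[]; };' the field 'data'
   satisfies flexible_array_member_p: it is the last field, its array type
   has a zero lower bound and no upper bound, and S itself still has a
   computable size.  */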
5772 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5773 must have in order for it to completely initialize a value of type TYPE.
5774 Return -1 if the number isn't known.
5776 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5778 static HOST_WIDE_INT
5779 count_type_elements (const_tree type, bool for_ctor_p)
5781 switch (TREE_CODE (type))
5783 case ARRAY_TYPE:
5785 tree nelts;
5787 nelts = array_type_nelts (type);
5788 if (nelts && tree_fits_uhwi_p (nelts))
5790 unsigned HOST_WIDE_INT n;
5792 n = tree_to_uhwi (nelts) + 1;
5793 if (n == 0 || for_ctor_p)
5794 return n;
5795 else
5796 return n * count_type_elements (TREE_TYPE (type), false);
5798 return for_ctor_p ? -1 : 1;
5801 case RECORD_TYPE:
5803 unsigned HOST_WIDE_INT n;
5804 tree f;
5806 n = 0;
5807 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5808 if (TREE_CODE (f) == FIELD_DECL)
5810 if (!for_ctor_p)
5811 n += count_type_elements (TREE_TYPE (f), false);
5812 else if (!flexible_array_member_p (f, type))
5813 /* Don't count flexible arrays, which are not supposed
5814 to be initialized. */
5815 n += 1;
5818 return n;
5821 case UNION_TYPE:
5822 case QUAL_UNION_TYPE:
5824 tree f;
5825 HOST_WIDE_INT n, m;
5827 gcc_assert (!for_ctor_p);
5828 /* Estimate the number of scalars in each field and pick the
5829 maximum. Other estimates would do instead; the idea is simply
5830 to make sure that the estimate is not sensitive to the ordering
5831 of the fields. */
5832 n = 1;
5833 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5834 if (TREE_CODE (f) == FIELD_DECL)
5836 m = count_type_elements (TREE_TYPE (f), false);
5837 /* If the field doesn't span the whole union, add an extra
5838 scalar for the rest. */
5839 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5840 TYPE_SIZE (type)) != 1)
5841 m++;
5842 if (n < m)
5843 n = m;
5845 return n;
5848 case COMPLEX_TYPE:
5849 return 2;
5851 case VECTOR_TYPE:
5852 return TYPE_VECTOR_SUBPARTS (type);
5854 case INTEGER_TYPE:
5855 case REAL_TYPE:
5856 case FIXED_POINT_TYPE:
5857 case ENUMERAL_TYPE:
5858 case BOOLEAN_TYPE:
5859 case POINTER_TYPE:
5860 case OFFSET_TYPE:
5861 case REFERENCE_TYPE:
5862 case NULLPTR_TYPE:
5863 return 1;
5865 case ERROR_MARK:
5866 return 0;
5868 case VOID_TYPE:
5869 case METHOD_TYPE:
5870 case FUNCTION_TYPE:
5871 case LANG_TYPE:
5872 default:
5873 gcc_unreachable ();
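/* Worked example: for 'struct { int a; double b[4]; }' this returns 2 when
   FOR_CTOR_P (the two top-level fields a constructor must cover) and
   1 + 4 = 5 otherwise (an estimate of the scalars contained in the type).  */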
5877 /* Helper for categorize_ctor_elements. Identical interface. */
5879 static bool
5880 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5881 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5883 unsigned HOST_WIDE_INT idx;
5884 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5885 tree value, purpose, elt_type;
5887 /* Whether CTOR is a valid constant initializer, in accordance with what
5888 initializer_constant_valid_p does. If inferred from the constructor
5889 elements, true until proven otherwise. */
5890 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5891 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5893 nz_elts = 0;
5894 init_elts = 0;
5895 num_fields = 0;
5896 elt_type = NULL_TREE;
5898 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5900 HOST_WIDE_INT mult = 1;
5902 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5904 tree lo_index = TREE_OPERAND (purpose, 0);
5905 tree hi_index = TREE_OPERAND (purpose, 1);
5907 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5908 mult = (tree_to_uhwi (hi_index)
5909 - tree_to_uhwi (lo_index) + 1);
5911 num_fields += mult;
5912 elt_type = TREE_TYPE (value);
5914 switch (TREE_CODE (value))
5916 case CONSTRUCTOR:
5918 HOST_WIDE_INT nz = 0, ic = 0;
5920 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5921 p_complete);
5923 nz_elts += mult * nz;
5924 init_elts += mult * ic;
5926 if (const_from_elts_p && const_p)
5927 const_p = const_elt_p;
5929 break;
5931 case INTEGER_CST:
5932 case REAL_CST:
5933 case FIXED_CST:
5934 if (!initializer_zerop (value))
5935 nz_elts += mult;
5936 init_elts += mult;
5937 break;
5939 case STRING_CST:
5940 nz_elts += mult * TREE_STRING_LENGTH (value);
5941 init_elts += mult * TREE_STRING_LENGTH (value);
5942 break;
5944 case COMPLEX_CST:
5945 if (!initializer_zerop (TREE_REALPART (value)))
5946 nz_elts += mult;
5947 if (!initializer_zerop (TREE_IMAGPART (value)))
5948 nz_elts += mult;
5949 init_elts += mult;
5950 break;
5952 case VECTOR_CST:
5954 unsigned i;
5955 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5957 tree v = VECTOR_CST_ELT (value, i);
5958 if (!initializer_zerop (v))
5959 nz_elts += mult;
5960 init_elts += mult;
5963 break;
5965 default:
5967 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5968 nz_elts += mult * tc;
5969 init_elts += mult * tc;
5971 if (const_from_elts_p && const_p)
5972 const_p
5973 = initializer_constant_valid_p (value,
5974 elt_type,
5975 TYPE_REVERSE_STORAGE_ORDER
5976 (TREE_TYPE (ctor)))
5977 != NULL_TREE;
5979 break;
5983 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5984 num_fields, elt_type))
5985 *p_complete = false;
5987 *p_nz_elts += nz_elts;
5988 *p_init_elts += init_elts;
5990 return const_p;
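/* For instance, for 'int a[4] = { 5, 7 };' the CONSTRUCTOR has two elements,
   so *P_NZ_ELTS and *P_INIT_ELTS both become 2, and *P_COMPLETE is cleared
   because a complete initializer for the array would need 4 elements
   (see complete_ctor_at_level_p).  */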
5993 /* Examine CTOR to discover:
5994 * how many scalar fields are set to nonzero values,
5995 and place it in *P_NZ_ELTS;
5996 * how many scalar fields in total are in CTOR,
5997 and place it in *P_ELT_COUNT.
5998 * whether the constructor is complete -- in the sense that every
5999 meaningful byte is explicitly given a value --
6000 and place it in *P_COMPLETE.
6002 Return whether or not CTOR is a valid static constant initializer, the same
6003 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
6005 bool
6006 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
6007 HOST_WIDE_INT *p_init_elts, bool *p_complete)
6009 *p_nz_elts = 0;
6010 *p_init_elts = 0;
6011 *p_complete = true;
6013 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
6016 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
6017 of which had type LAST_TYPE. Each element was itself a complete
6018 initializer, in the sense that every meaningful byte was explicitly
6019 given a value. Return true if the same is true for the constructor
6020 as a whole. */
6022 bool
6023 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
6024 const_tree last_type)
6026 if (TREE_CODE (type) == UNION_TYPE
6027 || TREE_CODE (type) == QUAL_UNION_TYPE)
6029 if (num_elts == 0)
6030 return false;
6032 gcc_assert (num_elts == 1 && last_type);
6034 /* ??? We could look at each element of the union, and find the
6035 largest element. Which would avoid comparing the size of the
6036 initialized element against any tail padding in the union.
6037 Doesn't seem worth the effort... */
6038 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
6041 return count_type_elements (type, true) == num_elts;
6044 /* Return 1 if EXP contains mostly (3/4) zeros. */
6046 static int
6047 mostly_zeros_p (const_tree exp)
6049 if (TREE_CODE (exp) == CONSTRUCTOR)
6051 HOST_WIDE_INT nz_elts, init_elts;
6052 bool complete_p;
6054 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
6055 return !complete_p || nz_elts < init_elts / 4;
6058 return initializer_zerop (exp);
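/* E.g. a CONSTRUCTOR with 16 initialized scalars of which only 3 are
   nonzero satisfies mostly_zeros_p (3 < 16 / 4), as does any incomplete
   constructor, so callers prefer to clear the whole object first and then
   store just the nonzero fields.  */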
6061 /* Return 1 if EXP contains all zeros. */
6063 static int
6064 all_zeros_p (const_tree exp)
6066 if (TREE_CODE (exp) == CONSTRUCTOR)
6068 HOST_WIDE_INT nz_elts, init_elts;
6069 bool complete_p;
6071 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
6072 return nz_elts == 0;
6075 return initializer_zerop (exp);
6078 /* Helper function for store_constructor.
6079 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
6080 CLEARED is as for store_constructor.
6081 ALIAS_SET is the alias set to use for any stores.
6082 If REVERSE is true, the store is to be done in reverse order.
6084 This provides a recursive shortcut back to store_constructor when it isn't
6085 necessary to go through store_field. This is so that we can pass through
6086 the cleared field to let store_constructor know that we may not have to
6087 clear a substructure if the outer structure has already been cleared. */
6089 static void
6090 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
6091 HOST_WIDE_INT bitpos,
6092 unsigned HOST_WIDE_INT bitregion_start,
6093 unsigned HOST_WIDE_INT bitregion_end,
6094 machine_mode mode,
6095 tree exp, int cleared,
6096 alias_set_type alias_set, bool reverse)
6098 if (TREE_CODE (exp) == CONSTRUCTOR
6099 /* We can only call store_constructor recursively if the size and
6100 bit position are on a byte boundary. */
6101 && bitpos % BITS_PER_UNIT == 0
6102 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
6103 /* If we have a nonzero bitpos for a register target, then we just
6104 let store_field do the bitfield handling. This is unlikely to
6105 generate unnecessary clear instructions anyway. */
6106 && (bitpos == 0 || MEM_P (target)))
6108 if (MEM_P (target))
6109 target
6110 = adjust_address (target,
6111 GET_MODE (target) == BLKmode
6112 || 0 != (bitpos
6113 % GET_MODE_ALIGNMENT (GET_MODE (target)))
6114 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
6117 /* Update the alias set, if required. */
6118 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
6119 && MEM_ALIAS_SET (target) != 0)
6121 target = copy_rtx (target);
6122 set_mem_alias_set (target, alias_set);
6125 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT,
6126 reverse);
6128 else
6129 store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
6130 exp, alias_set, false, reverse);
6134 /* Returns the number of FIELD_DECLs in TYPE. */
6136 static int
6137 fields_length (const_tree type)
6139 tree t = TYPE_FIELDS (type);
6140 int count = 0;
6142 for (; t; t = DECL_CHAIN (t))
6143 if (TREE_CODE (t) == FIELD_DECL)
6144 ++count;
6146 return count;
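/* For example, for 'struct P { int x, y; } p = { 1 };' the CONSTRUCTOR has
   fewer elements than the struct has fields, so store_constructor below
   clears the whole object first and then stores just the initialized
   field.  */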
6150 /* Store the value of constructor EXP into the rtx TARGET.
6151 TARGET is either a REG or a MEM; we know it cannot conflict, since
6152 safe_from_p has been called.
6153 CLEARED is true if TARGET is known to have been zero'd.
6154 SIZE is the number of bytes of TARGET we are allowed to modify: this
6155 may not be the same as the size of EXP if we are assigning to a field
6156 which has been packed to exclude padding bits.
6157 If REVERSE is true, the store is to be done in reverse order. */
6159 static void
6160 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
6161 bool reverse)
6163 tree type = TREE_TYPE (exp);
6164 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6165 HOST_WIDE_INT bitregion_end = size > 0 ? size * BITS_PER_UNIT - 1 : 0;
6167 switch (TREE_CODE (type))
6169 case RECORD_TYPE:
6170 case UNION_TYPE:
6171 case QUAL_UNION_TYPE:
6173 unsigned HOST_WIDE_INT idx;
6174 tree field, value;
6176 /* The storage order is specified for every aggregate type. */
6177 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6179 /* If size is zero or the target is already cleared, do nothing. */
6180 if (size == 0 || cleared)
6181 cleared = 1;
6182 /* We either clear the aggregate or indicate the value is dead. */
6183 else if ((TREE_CODE (type) == UNION_TYPE
6184 || TREE_CODE (type) == QUAL_UNION_TYPE)
6185 && ! CONSTRUCTOR_ELTS (exp))
6186 /* If the constructor is empty, clear the union. */
6188 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6189 cleared = 1;
6192 /* If we are building a static constructor into a register,
6193 set the initial value as zero so we can fold the value into
6194 a constant. But if more than one register is involved,
6195 this probably loses. */
6196 else if (REG_P (target) && TREE_STATIC (exp)
6197 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6199 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6200 cleared = 1;
6203 /* If the constructor has fewer fields than the structure or
6204 if we are initializing the structure to mostly zeros, clear
6205 the whole structure first. Don't do this if TARGET is a
6206 register whose mode size isn't equal to SIZE since
6207 clear_storage can't handle this case. */
6208 else if (size > 0
6209 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
6210 || mostly_zeros_p (exp))
6211 && (!REG_P (target)
6212 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6213 == size)))
6215 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6216 cleared = 1;
6219 if (REG_P (target) && !cleared)
6220 emit_clobber (target);
6222 /* Store each element of the constructor into the
6223 corresponding field of TARGET. */
6224 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6226 machine_mode mode;
6227 HOST_WIDE_INT bitsize;
6228 HOST_WIDE_INT bitpos = 0;
6229 tree offset;
6230 rtx to_rtx = target;
6232 /* Just ignore missing fields. We cleared the whole
6233 structure, above, if any fields are missing. */
6234 if (field == 0)
6235 continue;
6237 if (cleared && initializer_zerop (value))
6238 continue;
6240 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6241 bitsize = tree_to_uhwi (DECL_SIZE (field));
6242 else
6243 gcc_unreachable ();
6245 mode = DECL_MODE (field);
6246 if (DECL_BIT_FIELD (field))
6247 mode = VOIDmode;
6249 offset = DECL_FIELD_OFFSET (field);
6250 if (tree_fits_shwi_p (offset)
6251 && tree_fits_shwi_p (bit_position (field)))
6253 bitpos = int_bit_position (field);
6254 offset = NULL_TREE;
6256 else
6257 gcc_unreachable ();
6259 /* If this initializes a field that is smaller than a
6260 word, at the start of a word, try to widen it to a full
6261 word. This special case allows us to output C++ member
6262 function initializations in a form that the optimizers
6263 can understand. */
6264 if (WORD_REGISTER_OPERATIONS
6265 && REG_P (target)
6266 && bitsize < BITS_PER_WORD
6267 && bitpos % BITS_PER_WORD == 0
6268 && GET_MODE_CLASS (mode) == MODE_INT
6269 && TREE_CODE (value) == INTEGER_CST
6270 && exp_size >= 0
6271 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6273 tree type = TREE_TYPE (value);
6275 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6277 type = lang_hooks.types.type_for_mode
6278 (word_mode, TYPE_UNSIGNED (type));
6279 value = fold_convert (type, value);
6280 /* Make sure the bits beyond the original bitsize are zero
6281 so that we can correctly avoid extra zeroing stores in
6282 later constructor elements. */
6283 tree bitsize_mask
6284 = wide_int_to_tree (type, wi::mask (bitsize, false,
6285 BITS_PER_WORD));
6286 value = fold_build2 (BIT_AND_EXPR, type, value, bitsize_mask);
6289 if (BYTES_BIG_ENDIAN)
6290 value
6291 = fold_build2 (LSHIFT_EXPR, type, value,
6292 build_int_cst (type,
6293 BITS_PER_WORD - bitsize));
6294 bitsize = BITS_PER_WORD;
6295 mode = word_mode;
6298 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6299 && DECL_NONADDRESSABLE_P (field))
6301 to_rtx = copy_rtx (to_rtx);
6302 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6305 store_constructor_field (to_rtx, bitsize, bitpos,
6306 0, bitregion_end, mode,
6307 value, cleared,
6308 get_alias_set (TREE_TYPE (field)),
6309 reverse);
6311 break;
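      /* The ARRAY_TYPE case below unrolls small constant RANGE_EXPR
         initializers in place; for a large range such as
         'int a[100] = { [0 ... 99] = v };' with a non-constant V, it emits
         an explicit runtime loop over the index instead.  */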
6313 case ARRAY_TYPE:
6315 tree value, index;
6316 unsigned HOST_WIDE_INT i;
6317 int need_to_clear;
6318 tree domain;
6319 tree elttype = TREE_TYPE (type);
6320 int const_bounds_p;
6321 HOST_WIDE_INT minelt = 0;
6322 HOST_WIDE_INT maxelt = 0;
6324 /* The storage order is specified for every aggregate type. */
6325 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6327 domain = TYPE_DOMAIN (type);
6328 const_bounds_p = (TYPE_MIN_VALUE (domain)
6329 && TYPE_MAX_VALUE (domain)
6330 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6331 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6333 /* If we have constant bounds for the range of the type, get them. */
6334 if (const_bounds_p)
6336 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6337 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6340 /* If the constructor has fewer elements than the array, clear
6341 the whole array first. Similarly if this is a static
6342 constructor of a non-BLKmode object. */
6343 if (cleared)
6344 need_to_clear = 0;
6345 else if (REG_P (target) && TREE_STATIC (exp))
6346 need_to_clear = 1;
6347 else
6349 unsigned HOST_WIDE_INT idx;
6350 tree index, value;
6351 HOST_WIDE_INT count = 0, zero_count = 0;
6352 need_to_clear = ! const_bounds_p;
6354 /* This loop is a more accurate version of the loop in
6355 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6356 is also needed to check for missing elements. */
6357 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6359 HOST_WIDE_INT this_node_count;
6361 if (need_to_clear)
6362 break;
6364 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6366 tree lo_index = TREE_OPERAND (index, 0);
6367 tree hi_index = TREE_OPERAND (index, 1);
6369 if (! tree_fits_uhwi_p (lo_index)
6370 || ! tree_fits_uhwi_p (hi_index))
6372 need_to_clear = 1;
6373 break;
6376 this_node_count = (tree_to_uhwi (hi_index)
6377 - tree_to_uhwi (lo_index) + 1);
6379 else
6380 this_node_count = 1;
6382 count += this_node_count;
6383 if (mostly_zeros_p (value))
6384 zero_count += this_node_count;
6387 /* Clear the entire array first if there are any missing
6388 elements, or if the incidence of zero elements is >=
6389 75%. */
6390 if (! need_to_clear
6391 && (count < maxelt - minelt + 1
6392 || 4 * zero_count >= 3 * count))
6393 need_to_clear = 1;
6396 if (need_to_clear && size > 0)
6398 if (REG_P (target))
6399 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6400 else
6401 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6402 cleared = 1;
6405 if (!cleared && REG_P (target))
6406 /* Inform later passes that the old value is dead. */
6407 emit_clobber (target);
6409 /* Store each element of the constructor into the
6410 corresponding element of TARGET, determined by counting the
6411 elements. */
6412 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6414 machine_mode mode;
6415 HOST_WIDE_INT bitsize;
6416 HOST_WIDE_INT bitpos;
6417 rtx xtarget = target;
6419 if (cleared && initializer_zerop (value))
6420 continue;
6422 mode = TYPE_MODE (elttype);
6423 if (mode == BLKmode)
6424 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6425 ? tree_to_uhwi (TYPE_SIZE (elttype))
6426 : -1);
6427 else
6428 bitsize = GET_MODE_BITSIZE (mode);
6430 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6432 tree lo_index = TREE_OPERAND (index, 0);
6433 tree hi_index = TREE_OPERAND (index, 1);
6434 rtx index_r, pos_rtx;
6435 HOST_WIDE_INT lo, hi, count;
6436 tree position;
6438 /* If the range is constant and "small", unroll the loop. */
6439 if (const_bounds_p
6440 && tree_fits_shwi_p (lo_index)
6441 && tree_fits_shwi_p (hi_index)
6442 && (lo = tree_to_shwi (lo_index),
6443 hi = tree_to_shwi (hi_index),
6444 count = hi - lo + 1,
6445 (!MEM_P (target)
6446 || count <= 2
6447 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6448 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6449 <= 40 * 8)))))
6451 lo -= minelt; hi -= minelt;
6452 for (; lo <= hi; lo++)
6454 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6456 if (MEM_P (target)
6457 && !MEM_KEEP_ALIAS_SET_P (target)
6458 && TREE_CODE (type) == ARRAY_TYPE
6459 && TYPE_NONALIASED_COMPONENT (type))
6461 target = copy_rtx (target);
6462 MEM_KEEP_ALIAS_SET_P (target) = 1;
6465 store_constructor_field
6466 (target, bitsize, bitpos, 0, bitregion_end,
6467 mode, value, cleared,
6468 get_alias_set (elttype), reverse);
6471 else
6473 rtx_code_label *loop_start = gen_label_rtx ();
6474 rtx_code_label *loop_end = gen_label_rtx ();
6475 tree exit_cond;
6477 expand_normal (hi_index);
6479 index = build_decl (EXPR_LOCATION (exp),
6480 VAR_DECL, NULL_TREE, domain);
6481 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6482 SET_DECL_RTL (index, index_r);
6483 store_expr (lo_index, index_r, 0, false, reverse);
6485 /* Build the head of the loop. */
6486 do_pending_stack_adjust ();
6487 emit_label (loop_start);
6489 /* Assign value to element index. */
6490 position =
6491 fold_convert (ssizetype,
6492 fold_build2 (MINUS_EXPR,
6493 TREE_TYPE (index),
6494 index,
6495 TYPE_MIN_VALUE (domain)));
6497 position =
6498 size_binop (MULT_EXPR, position,
6499 fold_convert (ssizetype,
6500 TYPE_SIZE_UNIT (elttype)));
6502 pos_rtx = expand_normal (position);
6503 xtarget = offset_address (target, pos_rtx,
6504 highest_pow2_factor (position));
6505 xtarget = adjust_address (xtarget, mode, 0);
6506 if (TREE_CODE (value) == CONSTRUCTOR)
6507 store_constructor (value, xtarget, cleared,
6508 bitsize / BITS_PER_UNIT, reverse);
6509 else
6510 store_expr (value, xtarget, 0, false, reverse);
6512 /* Generate a conditional jump to exit the loop. */
6513 exit_cond = build2 (LT_EXPR, integer_type_node,
6514 index, hi_index);
6515 jumpif (exit_cond, loop_end,
6516 profile_probability::uninitialized ());
6518 /* Update the loop counter, and jump to the head of
6519 the loop. */
6520 expand_assignment (index,
6521 build2 (PLUS_EXPR, TREE_TYPE (index),
6522 index, integer_one_node),
6523 false);
6525 emit_jump (loop_start);
6527 /* Build the end of the loop. */
6528 emit_label (loop_end);
6531 else if ((index != 0 && ! tree_fits_shwi_p (index))
6532 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6534 tree position;
6536 if (index == 0)
6537 index = ssize_int (1);
6539 if (minelt)
6540 index = fold_convert (ssizetype,
6541 fold_build2 (MINUS_EXPR,
6542 TREE_TYPE (index),
6543 index,
6544 TYPE_MIN_VALUE (domain)));
6546 position =
6547 size_binop (MULT_EXPR, index,
6548 fold_convert (ssizetype,
6549 TYPE_SIZE_UNIT (elttype)));
6550 xtarget = offset_address (target,
6551 expand_normal (position),
6552 highest_pow2_factor (position));
6553 xtarget = adjust_address (xtarget, mode, 0);
6554 store_expr (value, xtarget, 0, false, reverse);
6556 else
6558 if (index != 0)
6559 bitpos = ((tree_to_shwi (index) - minelt)
6560 * tree_to_uhwi (TYPE_SIZE (elttype)));
6561 else
6562 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6564 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6565 && TREE_CODE (type) == ARRAY_TYPE
6566 && TYPE_NONALIASED_COMPONENT (type))
6568 target = copy_rtx (target);
6569 MEM_KEEP_ALIAS_SET_P (target) = 1;
6571 store_constructor_field (target, bitsize, bitpos, 0,
6572 bitregion_end, mode, value,
6573 cleared, get_alias_set (elttype),
6574 reverse);
6577 break;
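      /* In the VECTOR_TYPE case below, when the target is a vector register
         and the backend provides a vec_init pattern, the element rtxes are
         collected into a PARALLEL and emitted as a single insn; otherwise
         the elements are stored one field at a time, clearing the object
         first if many of them are zero.  */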
6580 case VECTOR_TYPE:
6582 unsigned HOST_WIDE_INT idx;
6583 constructor_elt *ce;
6584 int i;
6585 int need_to_clear;
6586 int icode = CODE_FOR_nothing;
6587 tree elttype = TREE_TYPE (type);
6588 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6589 machine_mode eltmode = TYPE_MODE (elttype);
6590 HOST_WIDE_INT bitsize;
6591 HOST_WIDE_INT bitpos;
6592 rtvec vector = NULL;
6593 unsigned n_elts;
6594 alias_set_type alias;
6595 bool vec_vec_init_p = false;
6597 gcc_assert (eltmode != BLKmode);
6599 n_elts = TYPE_VECTOR_SUBPARTS (type);
6600 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6602 machine_mode mode = GET_MODE (target);
6603 machine_mode emode = eltmode;
6605 if (CONSTRUCTOR_NELTS (exp)
6606 && (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
6607 == VECTOR_TYPE))
6609 tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
6610 gcc_assert (CONSTRUCTOR_NELTS (exp) * TYPE_VECTOR_SUBPARTS (etype)
6611 == n_elts);
6612 emode = TYPE_MODE (etype);
6614 icode = (int) convert_optab_handler (vec_init_optab, mode, emode);
6615 if (icode != CODE_FOR_nothing)
6617 unsigned int i, n = n_elts;
6619 if (emode != eltmode)
6621 n = CONSTRUCTOR_NELTS (exp);
6622 vec_vec_init_p = true;
6624 vector = rtvec_alloc (n);
6625 for (i = 0; i < n; i++)
6626 RTVEC_ELT (vector, i) = CONST0_RTX (emode);
6630 /* If the constructor has fewer elements than the vector,
6631 clear the whole vector first. Similarly if this is a static
6632 constructor of a non-BLKmode object. */
6633 if (cleared)
6634 need_to_clear = 0;
6635 else if (REG_P (target) && TREE_STATIC (exp))
6636 need_to_clear = 1;
6637 else
6639 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6640 tree value;
6642 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6644 tree sz = TYPE_SIZE (TREE_TYPE (value));
6645 int n_elts_here
6646 = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR, sz,
6647 TYPE_SIZE (elttype)));
6649 count += n_elts_here;
6650 if (mostly_zeros_p (value))
6651 zero_count += n_elts_here;
6654 /* Clear the entire vector first if there are any missing elements,
6655 or if the incidence of zero elements is >= 75%. */
6656 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6659 if (need_to_clear && size > 0 && !vector)
6661 if (REG_P (target))
6662 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6663 else
6664 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6665 cleared = 1;
6668 /* Inform later passes that the old value is dead. */
6669 if (!cleared && !vector && REG_P (target))
6670 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6672 if (MEM_P (target))
6673 alias = MEM_ALIAS_SET (target);
6674 else
6675 alias = get_alias_set (elttype);
6677 /* Store each element of the constructor into the corresponding
6678 element of TARGET, determined by counting the elements. */
6679 for (idx = 0, i = 0;
6680 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6681 idx++, i += bitsize / elt_size)
6683 HOST_WIDE_INT eltpos;
6684 tree value = ce->value;
6686 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6687 if (cleared && initializer_zerop (value))
6688 continue;
6690 if (ce->index)
6691 eltpos = tree_to_uhwi (ce->index);
6692 else
6693 eltpos = i;
6695 if (vector)
6697 if (vec_vec_init_p)
6699 gcc_assert (ce->index == NULL_TREE);
6700 gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
6701 eltpos = idx;
6703 else
6704 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6705 RTVEC_ELT (vector, eltpos) = expand_normal (value);
6707 else
6709 machine_mode value_mode
6710 = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6711 ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
6712 bitpos = eltpos * elt_size;
6713 store_constructor_field (target, bitsize, bitpos, 0,
6714 bitregion_end, value_mode,
6715 value, cleared, alias, reverse);
6719 if (vector)
6720 emit_insn (GEN_FCN (icode) (target,
6721 gen_rtx_PARALLEL (GET_MODE (target),
6722 vector)));
6723 break;
6726 default:
6727 gcc_unreachable ();
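/* A typical use of store_field below: assigning to a bit-field member such
   as 's.f' in 'struct { unsigned f : 3; } s;' arrives here with MODE ==
   VOIDmode, so the value is expanded into a pseudo and written with
   store_bit_field rather than through an ordinary memory reference.  */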
6731 /* Store the value of EXP (an expression tree)
6732 into a subfield of TARGET which has mode MODE and occupies
6733 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6734 If MODE is VOIDmode, it means that we are storing into a bit-field.
6736 BITREGION_START is bitpos of the first bitfield in this region.
6737 BITREGION_END is the bitpos of the ending bitfield in this region.
6738 These two fields are 0, if the C++ memory model does not apply,
6739 or we are not interested in keeping track of bitfield regions.
6741 Always return const0_rtx unless we have something particular to
6742 return.
6744 ALIAS_SET is the alias set for the destination. This value will
6745 (in general) be different from that for TARGET, since TARGET is a
6746 reference to the containing structure.
6748 If NONTEMPORAL is true, try generating a nontemporal store.
6750 If REVERSE is true, the store is to be done in reverse order. */
6752 static rtx
6753 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6754 unsigned HOST_WIDE_INT bitregion_start,
6755 unsigned HOST_WIDE_INT bitregion_end,
6756 machine_mode mode, tree exp,
6757 alias_set_type alias_set, bool nontemporal, bool reverse)
6759 if (TREE_CODE (exp) == ERROR_MARK)
6760 return const0_rtx;
6762 /* If we have nothing to store, do nothing unless the expression has
6763 side-effects. */
6764 if (bitsize == 0)
6765 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6767 if (GET_CODE (target) == CONCAT)
6769 /* We're storing into a struct containing a single __complex. */
6771 gcc_assert (!bitpos);
6772 return store_expr (exp, target, 0, nontemporal, reverse);
6775 /* If the structure is in a register or if the component
6776 is a bit field, we cannot use addressing to access it.
6777 Use bit-field techniques or SUBREG to store in it. */
6779 if (mode == VOIDmode
6780 || (mode != BLKmode && ! direct_store[(int) mode]
6781 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6782 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6783 || REG_P (target)
6784 || GET_CODE (target) == SUBREG
6785 /* If the field isn't aligned enough to store as an ordinary memref,
6786 store it as a bit field. */
6787 || (mode != BLKmode
6788 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6789 || bitpos % GET_MODE_ALIGNMENT (mode))
6790 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6791 || (bitpos % BITS_PER_UNIT != 0)))
6792 || (bitsize >= 0 && mode != BLKmode
6793 && GET_MODE_BITSIZE (mode) > bitsize)
6794 /* If the RHS and field are a constant size and the size of the
6795 RHS isn't the same size as the bitfield, we must use bitfield
6796 operations. */
6797 || (bitsize >= 0
6798 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6799 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0
6800 /* Except for initialization of full bytes from a CONSTRUCTOR, which
6801 we will handle specially below. */
6802 && !(TREE_CODE (exp) == CONSTRUCTOR
6803 && bitsize % BITS_PER_UNIT == 0)
6804 /* And except for bitwise copying of TREE_ADDRESSABLE types,
6805 where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
6806 includes some extra padding. store_expr / expand_expr will in
6807 that case call get_inner_reference that will have the bitsize
6808 we check here and thus the block move will not clobber the
6809 padding that shouldn't be clobbered. In the future we could
6810 replace the TREE_ADDRESSABLE check with a check that
6811 get_base_address needs to live in memory. */
6812 && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6813 || TREE_CODE (exp) != COMPONENT_REF
6814 || TREE_CODE (DECL_SIZE (TREE_OPERAND (exp, 1))) != INTEGER_CST
6815 || (bitsize % BITS_PER_UNIT != 0)
6816 || (bitpos % BITS_PER_UNIT != 0)
6817 || (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)), bitsize)
6818 != 0)))
6819 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6820 decl we must use bitfield operations. */
6821 || (bitsize >= 0
6822 && TREE_CODE (exp) == MEM_REF
6823 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6824 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6825 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6826 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6828 rtx temp;
6829 gimple *nop_def;
6831 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6832 implies a mask operation. If the precision is the same size as
6833 the field we're storing into, that mask is redundant. This is
6834 particularly common with bit field assignments generated by the
6835 C front end. */
6836 nop_def = get_def_for_expr (exp, NOP_EXPR);
6837 if (nop_def)
6839 tree type = TREE_TYPE (exp);
6840 if (INTEGRAL_TYPE_P (type)
6841 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6842 && bitsize == TYPE_PRECISION (type))
6844 tree op = gimple_assign_rhs1 (nop_def);
6845 type = TREE_TYPE (op);
6846 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6847 exp = op;
6851 temp = expand_normal (exp);
6853 /* Handle calls that return values in multiple non-contiguous locations.
6854 The Irix 6 ABI has examples of this. */
6855 if (GET_CODE (temp) == PARALLEL)
6857 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6858 scalar_int_mode temp_mode
6859 = smallest_int_mode_for_size (size * BITS_PER_UNIT);
6860 rtx temp_target = gen_reg_rtx (temp_mode);
6861 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6862 temp = temp_target;
6865 /* Handle calls that return BLKmode values in registers. */
6866 else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6868 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6869 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6870 temp = temp_target;
6873 /* If the value has aggregate type and an integral mode then, if BITSIZE
6874 is narrower than this mode and this is for big-endian data, we first
6875 need to put the value into the low-order bits for store_bit_field,
6876 except when MODE is BLKmode and BITSIZE larger than the word size
6877 (see the handling of fields larger than a word in store_bit_field).
6878 Moreover, the field may be not aligned on a byte boundary; in this
6879 case, if it has reverse storage order, it needs to be accessed as a
6880 scalar field with reverse storage order and we must first put the
6881 value into target order. */
6882 scalar_int_mode temp_mode;
6883 if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
6884 && is_int_mode (GET_MODE (temp), &temp_mode))
6886 HOST_WIDE_INT size = GET_MODE_BITSIZE (temp_mode);
6888 reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
6890 if (reverse)
6891 temp = flip_storage_order (temp_mode, temp);
6893 if (bitsize < size
6894 && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
6895 && !(mode == BLKmode && bitsize > BITS_PER_WORD))
6896 temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
6897 size - bitsize, NULL_RTX, 1);
6900 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6901 if (mode != VOIDmode && mode != BLKmode
6902 && mode != TYPE_MODE (TREE_TYPE (exp)))
6903 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6905 /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
6906 and BITPOS must be aligned on a byte boundary. If so, we simply do
6907 a block copy. Likewise for a BLKmode-like TARGET. */
6908 if (GET_MODE (temp) == BLKmode
6909 && (GET_MODE (target) == BLKmode
6910 || (MEM_P (target)
6911 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6912 && (bitpos % BITS_PER_UNIT) == 0
6913 && (bitsize % BITS_PER_UNIT) == 0)))
6915 gcc_assert (MEM_P (target) && MEM_P (temp)
6916 && (bitpos % BITS_PER_UNIT) == 0);
6918 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6919 emit_block_move (target, temp,
6920 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6921 / BITS_PER_UNIT),
6922 BLOCK_OP_NORMAL);
6924 return const0_rtx;
6927 /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
6928 word size, we need to load the value (see again store_bit_field). */
6929 if (GET_MODE (temp) == BLKmode && bitsize <= BITS_PER_WORD)
6931 scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize);
6932 temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
6933 temp_mode, false, NULL);
6936 /* Store the value in the bitfield. */
6937 store_bit_field (target, bitsize, bitpos,
6938 bitregion_start, bitregion_end,
6939 mode, temp, reverse);
6941 return const0_rtx;
6943 else
6945 /* Now build a reference to just the desired component. */
6946 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6948 if (to_rtx == target)
6949 to_rtx = copy_rtx (to_rtx);
6951 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6952 set_mem_alias_set (to_rtx, alias_set);
6954 /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
6955 into a target smaller than its type; handle that case now. */
6956 if (TREE_CODE (exp) == CONSTRUCTOR && bitsize >= 0)
6958 gcc_assert (bitsize % BITS_PER_UNIT == 0);
6959 store_constructor (exp, to_rtx, 0, bitsize / BITS_PER_UNIT, reverse);
6960 return to_rtx;
6963 return store_expr (exp, to_rtx, 0, nontemporal, reverse);
6967 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6968 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6969 codes and find the ultimate containing object, which we return.
6971 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6972 bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
6973 storage order of the field.
6974 If the position of the field is variable, we store a tree
6975 giving the variable offset (in units) in *POFFSET.
6976 This offset is in addition to the bit position.
6977 If the position is not variable, we store 0 in *POFFSET.
6979 If any of the extraction expressions is volatile,
6980 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6982 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6983 Otherwise, it is a mode that can be used to access the field.
6985 If the field describes a variable-sized object, *PMODE is set to
6986 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6987 this case, but the address of the object can be found. */
6989 tree
6990 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6991 HOST_WIDE_INT *pbitpos, tree *poffset,
6992 machine_mode *pmode, int *punsignedp,
6993 int *preversep, int *pvolatilep)
6995 tree size_tree = 0;
6996 machine_mode mode = VOIDmode;
6997 bool blkmode_bitfield = false;
6998 tree offset = size_zero_node;
6999 offset_int bit_offset = 0;
7001 /* First get the mode, signedness, storage order and size. We do this from
7002 just the outermost expression. */
7003 *pbitsize = -1;
7004 if (TREE_CODE (exp) == COMPONENT_REF)
7006 tree field = TREE_OPERAND (exp, 1);
7007 size_tree = DECL_SIZE (field);
7008 if (flag_strict_volatile_bitfields > 0
7009 && TREE_THIS_VOLATILE (exp)
7010 && DECL_BIT_FIELD_TYPE (field)
7011 && DECL_MODE (field) != BLKmode)
7012 /* Volatile bitfields should be accessed in the mode of the
7013 field's type, not the mode computed based on the bit
7014 size. */
7015 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
7016 else if (!DECL_BIT_FIELD (field))
7017 mode = DECL_MODE (field);
7018 else if (DECL_MODE (field) == BLKmode)
7019 blkmode_bitfield = true;
7021 *punsignedp = DECL_UNSIGNED (field);
7023 else if (TREE_CODE (exp) == BIT_FIELD_REF)
7025 size_tree = TREE_OPERAND (exp, 1);
7026 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
7027 || TYPE_UNSIGNED (TREE_TYPE (exp)));
7029 /* For vector types, with the correct size of access, use the mode of
7030 inner type. */
7031 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
7032 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
7033 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
7034 mode = TYPE_MODE (TREE_TYPE (exp));
7036 else
7038 mode = TYPE_MODE (TREE_TYPE (exp));
7039 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
7041 if (mode == BLKmode)
7042 size_tree = TYPE_SIZE (TREE_TYPE (exp));
7043 else
7044 *pbitsize = GET_MODE_BITSIZE (mode);
7047 if (size_tree != 0)
7049 if (! tree_fits_uhwi_p (size_tree))
7050 mode = BLKmode, *pbitsize = -1;
7051 else
7052 *pbitsize = tree_to_uhwi (size_tree);
7055 *preversep = reverse_storage_order_for_component_p (exp);
7057 /* Compute cumulative bit-offset for nested component-refs and array-refs,
7058 and find the ultimate containing object. */
7059 while (1)
7061 switch (TREE_CODE (exp))
7063 case BIT_FIELD_REF:
7064 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
7065 break;
7067 case COMPONENT_REF:
7069 tree field = TREE_OPERAND (exp, 1);
7070 tree this_offset = component_ref_field_offset (exp);
7072 /* If this field hasn't been filled in yet, don't go past it.
7073 This should only happen when folding expressions made during
7074 type construction. */
7075 if (this_offset == 0)
7076 break;
7078 offset = size_binop (PLUS_EXPR, offset, this_offset);
7079 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
7081 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
7083 break;
7085 case ARRAY_REF:
7086 case ARRAY_RANGE_REF:
7088 tree index = TREE_OPERAND (exp, 1);
7089 tree low_bound = array_ref_low_bound (exp);
7090 tree unit_size = array_ref_element_size (exp);
7092 /* We assume all arrays have sizes that are a multiple of a byte.
7093 First subtract the lower bound, if any, in the type of the
7094 index, then convert to sizetype and multiply by the size of
7095 the array element. */
7096 if (! integer_zerop (low_bound))
7097 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
7098 index, low_bound);
7100 offset = size_binop (PLUS_EXPR, offset,
7101 size_binop (MULT_EXPR,
7102 fold_convert (sizetype, index),
7103 unit_size));
7105 break;
7107 case REALPART_EXPR:
7108 break;
7110 case IMAGPART_EXPR:
7111 bit_offset += *pbitsize;
7112 break;
7114 case VIEW_CONVERT_EXPR:
7115 break;
7117 case MEM_REF:
7118 /* Hand back the decl for MEM[&decl, off]. */
7119 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
7121 tree off = TREE_OPERAND (exp, 1);
7122 if (!integer_zerop (off))
7124 offset_int boff, coff = mem_ref_offset (exp);
7125 boff = coff << LOG2_BITS_PER_UNIT;
7126 bit_offset += boff;
7128 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7130 goto done;
7132 default:
7133 goto done;
7136 /* If any reference in the chain is volatile, the effect is volatile. */
7137 if (TREE_THIS_VOLATILE (exp))
7138 *pvolatilep = 1;
7140 exp = TREE_OPERAND (exp, 0);
7142 done:
7144 /* If OFFSET is constant, see if we can return the whole thing as a
7145 constant bit position. Make sure to handle overflow during
7146 this conversion. */
7147 if (TREE_CODE (offset) == INTEGER_CST)
7149 offset_int tem = wi::sext (wi::to_offset (offset),
7150 TYPE_PRECISION (sizetype));
7151 tem <<= LOG2_BITS_PER_UNIT;
7152 tem += bit_offset;
7153 if (wi::fits_shwi_p (tem))
7155 *pbitpos = tem.to_shwi ();
7156 *poffset = offset = NULL_TREE;
7160 /* Otherwise, split it up. */
7161 if (offset)
7163 /* Avoid returning a negative bitpos as this may wreak havoc later. */
7164 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
7166 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
7167 offset_int tem = bit_offset.and_not (mask);
7168 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
7169 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
7170 bit_offset -= tem;
7171 tem >>= LOG2_BITS_PER_UNIT;
7172 offset = size_binop (PLUS_EXPR, offset,
7173 wide_int_to_tree (sizetype, tem));
7176 *pbitpos = bit_offset.to_shwi ();
7177 *poffset = offset;
7180 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7181 if (mode == VOIDmode
7182 && blkmode_bitfield
7183 && (*pbitpos % BITS_PER_UNIT) == 0
7184 && (*pbitsize % BITS_PER_UNIT) == 0)
7185 *pmode = BLKmode;
7186 else
7187 *pmode = mode;
7189 return exp;
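/* For example, for the reference 'x.a.b[i]' this returns the innermost
   DECL for X, accumulates the constant field bit offsets into *PBITPOS,
   and leaves the variable part (I scaled by the element size) in
   *POFFSET.  */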
7192 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7194 static unsigned HOST_WIDE_INT
7195 target_align (const_tree target)
7197 /* We might have a chain of nested references with intermediate misaligning
7198 bit-field components, so we need to recurse to find out. */
7200 unsigned HOST_WIDE_INT this_align, outer_align;
7202 switch (TREE_CODE (target))
7204 case BIT_FIELD_REF:
7205 return 1;
7207 case COMPONENT_REF:
7208 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7209 outer_align = target_align (TREE_OPERAND (target, 0));
7210 return MIN (this_align, outer_align);
7212 case ARRAY_REF:
7213 case ARRAY_RANGE_REF:
7214 this_align = TYPE_ALIGN (TREE_TYPE (target));
7215 outer_align = target_align (TREE_OPERAND (target, 0));
7216 return MIN (this_align, outer_align);
7218 CASE_CONVERT:
7219 case NON_LVALUE_EXPR:
7220 case VIEW_CONVERT_EXPR:
7221 this_align = TYPE_ALIGN (TREE_TYPE (target));
7222 outer_align = target_align (TREE_OPERAND (target, 0));
7223 return MAX (this_align, outer_align);
7225 default:
7226 return TYPE_ALIGN (TREE_TYPE (target));
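/* For instance, passing (plus:SI (reg:SI R) (const_int 4)) to force_operand
   below emits the addition and hands back a pseudo (or TARGET) holding the
   sum, so later code can treat the value as a plain operand.  */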
7231 /* Given an rtx VALUE that may contain additions and multiplications, return
7232 an equivalent value that just refers to a register, memory, or constant.
7233 This is done by generating instructions to perform the arithmetic and
7234 returning a pseudo-register containing the value.
7236 The returned value may be a REG, SUBREG, MEM or constant. */
7238 rtx
7239 force_operand (rtx value, rtx target)
7241 rtx op1, op2;
7242 /* Use subtarget as the target for operand 0 of a binary operation. */
7243 rtx subtarget = get_subtarget (target);
7244 enum rtx_code code = GET_CODE (value);
7246 /* Check for subreg applied to an expression produced by loop optimizer. */
7247 if (code == SUBREG
7248 && !REG_P (SUBREG_REG (value))
7249 && !MEM_P (SUBREG_REG (value)))
7251 value
7252 = simplify_gen_subreg (GET_MODE (value),
7253 force_reg (GET_MODE (SUBREG_REG (value)),
7254 force_operand (SUBREG_REG (value),
7255 NULL_RTX)),
7256 GET_MODE (SUBREG_REG (value)),
7257 SUBREG_BYTE (value));
7258 code = GET_CODE (value);
7261 /* Check for a PIC address load. */
7262 if ((code == PLUS || code == MINUS)
7263 && XEXP (value, 0) == pic_offset_table_rtx
7264 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7265 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7266 || GET_CODE (XEXP (value, 1)) == CONST))
7268 if (!subtarget)
7269 subtarget = gen_reg_rtx (GET_MODE (value));
7270 emit_move_insn (subtarget, value);
7271 return subtarget;
7274 if (ARITHMETIC_P (value))
7276 op2 = XEXP (value, 1);
7277 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7278 subtarget = 0;
7279 if (code == MINUS && CONST_INT_P (op2))
7281 code = PLUS;
7282 op2 = negate_rtx (GET_MODE (value), op2);
7285 /* Check for an addition with OP2 a constant integer and our first
7286 operand a PLUS of a virtual register and something else. In that
7287 case, we want to emit the sum of the virtual register and the
7288 constant first and then add the other value. This allows virtual
7289 register instantiation to simply modify the constant rather than
7290 creating another one around this addition. */
7291 if (code == PLUS && CONST_INT_P (op2)
7292 && GET_CODE (XEXP (value, 0)) == PLUS
7293 && REG_P (XEXP (XEXP (value, 0), 0))
7294 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7295 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7297 rtx temp = expand_simple_binop (GET_MODE (value), code,
7298 XEXP (XEXP (value, 0), 0), op2,
7299 subtarget, 0, OPTAB_LIB_WIDEN);
7300 return expand_simple_binop (GET_MODE (value), code, temp,
7301 force_operand (XEXP (XEXP (value,
7302 0), 1), 0),
7303 target, 0, OPTAB_LIB_WIDEN);
7306 op1 = force_operand (XEXP (value, 0), subtarget);
7307 op2 = force_operand (op2, NULL_RTX);
7308 switch (code)
7310 case MULT:
7311 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7312 case DIV:
7313 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7314 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7315 target, 1, OPTAB_LIB_WIDEN);
7316 else
7317 return expand_divmod (0,
7318 FLOAT_MODE_P (GET_MODE (value))
7319 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7320 GET_MODE (value), op1, op2, target, 0);
7321 case MOD:
7322 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7323 target, 0);
7324 case UDIV:
7325 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7326 target, 1);
7327 case UMOD:
7328 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7329 target, 1);
7330 case ASHIFTRT:
7331 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7332 target, 0, OPTAB_LIB_WIDEN);
7333 default:
7334 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7335 target, 1, OPTAB_LIB_WIDEN);
7338 if (UNARY_P (value))
7340 if (!target)
7341 target = gen_reg_rtx (GET_MODE (value));
7342 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7343 switch (code)
7345 case ZERO_EXTEND:
7346 case SIGN_EXTEND:
7347 case TRUNCATE:
7348 case FLOAT_EXTEND:
7349 case FLOAT_TRUNCATE:
7350 convert_move (target, op1, code == ZERO_EXTEND);
7351 return target;
7353 case FIX:
7354 case UNSIGNED_FIX:
7355 expand_fix (target, op1, code == UNSIGNED_FIX);
7356 return target;
7358 case FLOAT:
7359 case UNSIGNED_FLOAT:
7360 expand_float (target, op1, code == UNSIGNED_FLOAT);
7361 return target;
7363 default:
7364 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7368 #ifdef INSN_SCHEDULING
7369 /* On machines that have insn scheduling, we want all memory references to be
7370 explicit, so we need to deal with such paradoxical SUBREGs. */
7371 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7372 value
7373 = simplify_gen_subreg (GET_MODE (value),
7374 force_reg (GET_MODE (SUBREG_REG (value)),
7375 force_operand (SUBREG_REG (value),
7376 NULL_RTX)),
7377 GET_MODE (SUBREG_REG (value)),
7378 SUBREG_BYTE (value));
7379 #endif
7381 return value;
7384 /* Subroutine of expand_expr: return nonzero iff there is no way that
7385 EXP can reference X, which is being modified. TOP_P is nonzero if this
7386 call is going to be used to determine whether we need a temporary
7387 for EXP, as opposed to a recursive call to this function.
7389 It is always safe for this routine to return zero since it merely
7390 searches for optimization opportunities. */
7393 safe_from_p (const_rtx x, tree exp, int top_p)
7395 rtx exp_rtl = 0;
7396 int i, nops;
7398 if (x == 0
7399 /* If EXP has varying size, we MUST use a target since we currently
7400 have no way of allocating temporaries of variable size
7401 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7402 So we assume here that something at a higher level has prevented a
7403 clash. This is somewhat bogus, but the best we can do. Only
7404 do this when X is BLKmode and when we are at the top level. */
7405 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7406 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7407 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7408 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7409 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7410 != INTEGER_CST)
7411 && GET_MODE (x) == BLKmode)
7412 /* If X is in the outgoing argument area, it is always safe. */
7413 || (MEM_P (x)
7414 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7415 || (GET_CODE (XEXP (x, 0)) == PLUS
7416 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7417 return 1;
7419 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7420 find the underlying pseudo. */
7421 if (GET_CODE (x) == SUBREG)
7423 x = SUBREG_REG (x);
7424 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7425 return 0;
7428 /* Now look at our tree code and possibly recurse. */
7429 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7431 case tcc_declaration:
7432 exp_rtl = DECL_RTL_IF_SET (exp);
7433 break;
7435 case tcc_constant:
7436 return 1;
7438 case tcc_exceptional:
7439 if (TREE_CODE (exp) == TREE_LIST)
7441 while (1)
7443 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7444 return 0;
7445 exp = TREE_CHAIN (exp);
7446 if (!exp)
7447 return 1;
7448 if (TREE_CODE (exp) != TREE_LIST)
7449 return safe_from_p (x, exp, 0);
7452 else if (TREE_CODE (exp) == CONSTRUCTOR)
7454 constructor_elt *ce;
7455 unsigned HOST_WIDE_INT idx;
7457 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7458 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7459 || !safe_from_p (x, ce->value, 0))
7460 return 0;
7461 return 1;
7463 else if (TREE_CODE (exp) == ERROR_MARK)
7464 return 1; /* An already-visited SAVE_EXPR? */
7465 else
7466 return 0;
7468 case tcc_statement:
7469 /* The only case we look at here is the DECL_INITIAL inside a
7470 DECL_EXPR. */
7471 return (TREE_CODE (exp) != DECL_EXPR
7472 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7473 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7474 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7476 case tcc_binary:
7477 case tcc_comparison:
7478 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7479 return 0;
7480 /* Fall through. */
7482 case tcc_unary:
7483 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7485 case tcc_expression:
7486 case tcc_reference:
7487 case tcc_vl_exp:
7488 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7489 the expression. If it is set, we conflict iff we are that rtx or
7490 both are in memory. Otherwise, we check all operands of the
7491 expression recursively. */
7493 switch (TREE_CODE (exp))
7495 case ADDR_EXPR:
7496 /* If the operand is static or we are static, we can't conflict.
7497 Likewise if we don't conflict with the operand at all. */
7498 if (staticp (TREE_OPERAND (exp, 0))
7499 || TREE_STATIC (exp)
7500 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7501 return 1;
7503 /* Otherwise, the only way this can conflict is if we are taking
7504 the address of a DECL whose address is part of X, which is
7505 very rare. */
7506 exp = TREE_OPERAND (exp, 0);
7507 if (DECL_P (exp))
7509 if (!DECL_RTL_SET_P (exp)
7510 || !MEM_P (DECL_RTL (exp)))
7511 return 0;
7512 else
7513 exp_rtl = XEXP (DECL_RTL (exp), 0);
7515 break;
7517 case MEM_REF:
7518 if (MEM_P (x)
7519 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7520 get_alias_set (exp)))
7521 return 0;
7522 break;
7524 case CALL_EXPR:
7525 /* Assume that the call will clobber all hard registers and
7526 all of memory. */
7527 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7528 || MEM_P (x))
7529 return 0;
7530 break;
7532 case WITH_CLEANUP_EXPR:
7533 case CLEANUP_POINT_EXPR:
7534 /* Lowered by gimplify.c. */
7535 gcc_unreachable ();
7537 case SAVE_EXPR:
7538 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7540 default:
7541 break;
7544 /* If we have an rtx, we do not need to scan our operands. */
7545 if (exp_rtl)
7546 break;
7548 nops = TREE_OPERAND_LENGTH (exp);
7549 for (i = 0; i < nops; i++)
7550 if (TREE_OPERAND (exp, i) != 0
7551 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7552 return 0;
7554 break;
7556 case tcc_type:
7557 /* Should never get a type here. */
7558 gcc_unreachable ();
7561 /* If we have an rtl, find any enclosed object. Then see if we conflict
7562 with it. */
7563 if (exp_rtl)
7565 if (GET_CODE (exp_rtl) == SUBREG)
7567 exp_rtl = SUBREG_REG (exp_rtl);
7568 if (REG_P (exp_rtl)
7569 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7570 return 0;
7573 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7574 are memory and they conflict. */
7575 return ! (rtx_equal_p (x, exp_rtl)
7576 || (MEM_P (x) && MEM_P (exp_rtl)
7577 && true_dependence (exp_rtl, VOIDmode, x)));
7580 /* If we reach here, it is safe. */
7581 return 1;
7585 /* Return the highest power of two that EXP is known to be a multiple of.
7586 This is used in updating alignment of MEMs in array references. */
7588 unsigned HOST_WIDE_INT
7589 highest_pow2_factor (const_tree exp)
7591 unsigned HOST_WIDE_INT ret;
7592 int trailing_zeros = tree_ctz (exp);
7593 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7594 return BIGGEST_ALIGNMENT;
7595 ret = HOST_WIDE_INT_1U << trailing_zeros;
7596 if (ret > BIGGEST_ALIGNMENT)
7597 return BIGGEST_ALIGNMENT;
7598 return ret;
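/* A minimal standalone sketch (not part of GCC) of the same idea for a
   plain unsigned integer, using GCC's count-trailing-zeros builtin;
   tree_ctz above plays the analogous role for trees.  For n == 0 the
   tree variant instead caps the answer at BIGGEST_ALIGNMENT.

     static unsigned long long
     highest_pow2_factor_of_uint (unsigned long long n)
     {
       return n ? 1ULL << __builtin_ctzll (n) : 0;
     }
*/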
7601 /* Similar, except that the alignment requirements of TARGET are
7602 taken into account. Assume it is at least as aligned as its
7603 type, unless it is a COMPONENT_REF in which case the layout of
7604 the structure gives the alignment. */
7606 static unsigned HOST_WIDE_INT
7607 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7609 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7610 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7612 return MAX (factor, talign);
7615 /* Convert the tree comparison code TCODE to the rtl one where the
7616 signedness is UNSIGNEDP. */
7618 static enum rtx_code
7619 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7621 enum rtx_code code;
7622 switch (tcode)
7624 case EQ_EXPR:
7625 code = EQ;
7626 break;
7627 case NE_EXPR:
7628 code = NE;
7629 break;
7630 case LT_EXPR:
7631 code = unsignedp ? LTU : LT;
7632 break;
7633 case LE_EXPR:
7634 code = unsignedp ? LEU : LE;
7635 break;
7636 case GT_EXPR:
7637 code = unsignedp ? GTU : GT;
7638 break;
7639 case GE_EXPR:
7640 code = unsignedp ? GEU : GE;
7641 break;
7642 case UNORDERED_EXPR:
7643 code = UNORDERED;
7644 break;
7645 case ORDERED_EXPR:
7646 code = ORDERED;
7647 break;
7648 case UNLT_EXPR:
7649 code = UNLT;
7650 break;
7651 case UNLE_EXPR:
7652 code = UNLE;
7653 break;
7654 case UNGT_EXPR:
7655 code = UNGT;
7656 break;
7657 case UNGE_EXPR:
7658 code = UNGE;
7659 break;
7660 case UNEQ_EXPR:
7661 code = UNEQ;
7662 break;
7663 case LTGT_EXPR:
7664 code = LTGT;
7665 break;
7667 default:
7668 gcc_unreachable ();
7670 return code;
7673 /* Subroutine of expand_expr. Expand the two operands EXP0 and EXP1 of
7674 a binary expression, placing the results in OP0 and OP1.
7675 The value may be stored in TARGET if TARGET is nonzero. The
7676 MODIFIER argument is as documented by expand_expr. */
7678 void
7679 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7680 enum expand_modifier modifier)
7682 if (! safe_from_p (target, exp1, 1))
7683 target = 0;
7684 if (operand_equal_p (exp0, exp1, 0))
7686 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7687 *op1 = copy_rtx (*op0);
7689 else
7691 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7692 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7697 /* Return a MEM that contains constant EXP. DEFER is as for
7698 output_constant_def and MODIFIER is as for expand_expr. */
7700 static rtx
7701 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7703 rtx mem;
7705 mem = output_constant_def (exp, defer);
7706 if (modifier != EXPAND_INITIALIZER)
7707 mem = use_anchored_address (mem);
7708 return mem;
7711 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7712 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7714 static rtx
7715 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7716 enum expand_modifier modifier, addr_space_t as)
7718 rtx result, subtarget;
7719 tree inner, offset;
7720 HOST_WIDE_INT bitsize, bitpos;
7721 int unsignedp, reversep, volatilep = 0;
7722 machine_mode mode1;
7724 /* If we are taking the address of a constant and are at the top level,
7725 we have to use output_constant_def since we can't call force_const_mem
7726 at top level. */
7727 /* ??? This should be considered a front-end bug. We should not be
7728 generating ADDR_EXPR of something that isn't an LVALUE. The only
7729 exception here is STRING_CST. */
7730 if (CONSTANT_CLASS_P (exp))
7732 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7733 if (modifier < EXPAND_SUM)
7734 result = force_operand (result, target);
7735 return result;
7738 /* Everything must be something allowed by is_gimple_addressable. */
7739 switch (TREE_CODE (exp))
7741 case INDIRECT_REF:
7742 /* This case will happen via recursion for &a->b. */
7743 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7745 case MEM_REF:
7747 tree tem = TREE_OPERAND (exp, 0);
7748 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7749 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7750 return expand_expr (tem, target, tmode, modifier);
7753 case CONST_DECL:
7754 /* Expand the initializer like constants above. */
7755 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7756 0, modifier), 0);
7757 if (modifier < EXPAND_SUM)
7758 result = force_operand (result, target);
7759 return result;
7761 case REALPART_EXPR:
7762 /* The real part of the complex number is always first, therefore
7763 the address is the same as the address of the parent object. */
7764 offset = 0;
7765 bitpos = 0;
7766 inner = TREE_OPERAND (exp, 0);
7767 break;
7769 case IMAGPART_EXPR:
7770 /* The imaginary part of the complex number is always second.
7771 The expression is therefore always offset by the size of the
7772 scalar type. */
7773 offset = 0;
7774 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7775 inner = TREE_OPERAND (exp, 0);
7776 break;
7778 case COMPOUND_LITERAL_EXPR:
7779 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7780 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7781 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7782 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7783 the initializers aren't gimplified. */
7784 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7785 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7786 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7787 target, tmode, modifier, as);
7788 /* FALLTHRU */
7789 default:
7790 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7791 expand_expr, as that can have various side effects; LABEL_DECLs for
7792 example, may not have their DECL_RTL set yet. Expand the rtl of
7793 CONSTRUCTORs too, which should yield a memory reference for the
7794 constructor's contents. Assume language specific tree nodes can
7795 be expanded in some interesting way. */
7796 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7797 if (DECL_P (exp)
7798 || TREE_CODE (exp) == CONSTRUCTOR
7799 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7801 result = expand_expr (exp, target, tmode,
7802 modifier == EXPAND_INITIALIZER
7803 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7805 /* If the DECL isn't in memory, then the DECL wasn't properly
7806 marked TREE_ADDRESSABLE, which will be either a front-end
7807 or a tree optimizer bug. */
7809 gcc_assert (MEM_P (result));
7810 result = XEXP (result, 0);
7812 /* ??? Is this needed anymore? */
7813 if (DECL_P (exp))
7814 TREE_USED (exp) = 1;
7816 if (modifier != EXPAND_INITIALIZER
7817 && modifier != EXPAND_CONST_ADDRESS
7818 && modifier != EXPAND_SUM)
7819 result = force_operand (result, target);
7820 return result;
7823 /* Pass FALSE as the last argument to get_inner_reference although
7824 we are expanding to RTL. The rationale is that we know how to
7825 handle "aligning nodes" here: we can just bypass them because
7826 they won't change the final object whose address will be returned
7827 (they actually exist only for that purpose). */
7828 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
7829 &unsignedp, &reversep, &volatilep);
7830 break;
7833 /* We must have made progress. */
7834 gcc_assert (inner != exp);
7836 subtarget = offset || bitpos ? NULL_RTX : target;
7837 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7838 inner alignment, force the inner to be sufficiently aligned. */
7839 if (CONSTANT_CLASS_P (inner)
7840 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7842 inner = copy_node (inner);
7843 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7844 SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
7845 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7847 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7849 if (offset)
7851 rtx tmp;
7853 if (modifier != EXPAND_NORMAL)
7854 result = force_operand (result, NULL);
7855 tmp = expand_expr (offset, NULL_RTX, tmode,
7856 modifier == EXPAND_INITIALIZER
7857 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7859 /* expand_expr is allowed to return an object in a mode other
7860 than TMODE. If it did, we need to convert. */
7861 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7862 tmp = convert_modes (tmode, GET_MODE (tmp),
7863 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7864 result = convert_memory_address_addr_space (tmode, result, as);
7865 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7867 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7868 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7869 else
7871 subtarget = bitpos ? NULL_RTX : target;
7872 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7873 1, OPTAB_LIB_WIDEN);
7877 if (bitpos)
7879 /* Someone beforehand should have rejected taking the address
7880 of such an object. */
7881 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7883 result = convert_memory_address_addr_space (tmode, result, as);
7884 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7885 if (modifier < EXPAND_SUM)
7886 result = force_operand (result, target);
7889 return result;
7892 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7893 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7895 static rtx
7896 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7897 enum expand_modifier modifier)
7899 addr_space_t as = ADDR_SPACE_GENERIC;
7900 machine_mode address_mode = Pmode;
7901 machine_mode pointer_mode = ptr_mode;
7902 machine_mode rmode;
7903 rtx result;
7905 /* Target mode of VOIDmode says "whatever's natural". */
7906 if (tmode == VOIDmode)
7907 tmode = TYPE_MODE (TREE_TYPE (exp));
7909 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7911 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7912 address_mode = targetm.addr_space.address_mode (as);
7913 pointer_mode = targetm.addr_space.pointer_mode (as);
7916 /* We can get called with some Weird Things if the user does silliness
7917 like "(short) &a". In that case, convert_memory_address won't do
7918 the right thing, so ignore the given target mode. */
7919 if (tmode != address_mode && tmode != pointer_mode)
7920 tmode = address_mode;
7922 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7923 tmode, modifier, as);
7925 /* Despite expand_expr claims concerning ignoring TMODE when not
7926 strictly convenient, stuff breaks if we don't honor it. Note
7927 that combined with the above, we only do this for pointer modes. */
7928 rmode = GET_MODE (result);
7929 if (rmode == VOIDmode)
7930 rmode = tmode;
7931 if (rmode != tmode)
7932 result = convert_memory_address_addr_space (tmode, result, as);
7934 return result;
7937 /* Generate code for computing CONSTRUCTOR EXP.
7938 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7939 is TRUE, instead of creating a temporary variable in memory
7940 NULL is returned and the caller needs to handle it differently. */
7942 static rtx
7943 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7944 bool avoid_temp_mem)
7946 tree type = TREE_TYPE (exp);
7947 machine_mode mode = TYPE_MODE (type);
7949 /* Try to avoid creating a temporary at all. This is possible
7950 if all of the initializer is zero.
7951 FIXME: try to handle all [0..255] initializers we can handle
7952 with memset. */
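/* Illustrative example (an assumption, not from the sources): for an
   aggregate initialization such as
     struct S { char buf[256]; } s = { 0 };
   whose constructor is entirely zero and whose target is BLKmode memory,
   the path below clears the target with a single clear_storage call
   (essentially a memset to zero) instead of storing it field by field.  */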
7953 if (TREE_STATIC (exp)
7954 && !TREE_ADDRESSABLE (exp)
7955 && target != 0 && mode == BLKmode
7956 && all_zeros_p (exp))
7958 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7959 return target;
7962 /* All elts simple constants => refer to a constant in memory. But
7963 if this is a non-BLKmode mode, let it store a field at a time
7964 since that should make a CONST_INT, CONST_WIDE_INT or
7965 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7966 use, it is best to store directly into the target unless the type
7967 is large enough that memcpy will be used. If we are making an
7968 initializer and all operands are constant, put it in memory as
7969 well.
7971 FIXME: Avoid trying to fill vector constructors piece-meal.
7972 Output them with output_constant_def below unless we're sure
7973 they're zeros. This should go away when vector initializers
7974 are treated like VECTOR_CST instead of arrays. */
7975 if ((TREE_STATIC (exp)
7976 && ((mode == BLKmode
7977 && ! (target != 0 && safe_from_p (target, exp, 1)))
7978 || TREE_ADDRESSABLE (exp)
7979 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7980 && (! can_move_by_pieces
7981 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7982 TYPE_ALIGN (type)))
7983 && ! mostly_zeros_p (exp))))
7984 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7985 && TREE_CONSTANT (exp)))
7987 rtx constructor;
7989 if (avoid_temp_mem)
7990 return NULL_RTX;
7992 constructor = expand_expr_constant (exp, 1, modifier);
7994 if (modifier != EXPAND_CONST_ADDRESS
7995 && modifier != EXPAND_INITIALIZER
7996 && modifier != EXPAND_SUM)
7997 constructor = validize_mem (constructor);
7999 return constructor;
8002 /* Handle calls that pass values in multiple non-contiguous
8003 locations. The Irix 6 ABI has examples of this. */
8004 if (target == 0 || ! safe_from_p (target, exp, 1)
8005 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
8007 if (avoid_temp_mem)
8008 return NULL_RTX;
8010 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
8013 store_constructor (exp, target, 0, int_expr_size (exp), false);
8014 return target;
8018 /* expand_expr: generate code for computing expression EXP.
8019 An rtx for the computed value is returned. The value is never null.
8020 In the case of a void EXP, const0_rtx is returned.
8022 The value may be stored in TARGET if TARGET is nonzero.
8023 TARGET is just a suggestion; callers must assume that
8024 the rtx returned may not be the same as TARGET.
8026 If TARGET is CONST0_RTX, it means that the value will be ignored.
8028 If TMODE is not VOIDmode, it suggests generating the
8029 result in mode TMODE. But this is done only when convenient.
8030 Otherwise, TMODE is ignored and the value is generated in its natural mode.
8031 TMODE is just a suggestion; callers must assume that
8032 the rtx returned may not have mode TMODE.
8034 Note that TARGET may have neither TMODE nor MODE. In that case, it
8035 probably will not be used.
8037 If MODIFIER is EXPAND_SUM then when EXP is an addition
8038 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
8039 or a nest of (PLUS ...) and (MINUS ...) where the terms are
8040 products as above, or REG or MEM, or constant.
8041 Ordinarily in such cases we would output mul or add instructions
8042 and then return a pseudo reg containing the sum.
8044 EXPAND_INITIALIZER is much like EXPAND_SUM except that
8045 it also marks a label as absolutely required (it can't be dead).
8046 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
8047 This is used for outputting expressions used in initializers.
8049 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
8050 with a constant address even if that address is not normally legitimate.
8051 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
8053 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
8054 a call parameter. Such targets require special care as we haven't yet
8055 marked TARGET so that it's safe from being trashed by libcalls. We
8056 don't want to use TARGET for anything but the final result;
8057 intermediate values must go elsewhere. Additionally, calls to
8058 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
8060 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
8061 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
8062 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
8063 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
8064 recursively.
8066 If INNER_REFERENCE_P is true, we are expanding an inner reference.
8067 In this case, we don't adjust a returned MEM rtx that wouldn't be
8068 sufficiently aligned for its mode; instead, it's up to the caller
8069 to deal with it afterwards. This is used to make sure that unaligned
8070 base objects for which out-of-bounds accesses are supported, for
8071 example record types with trailing arrays, aren't realigned behind
8072 the back of the caller.
8073 The normal operating mode is to pass FALSE for this parameter. */
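/* Illustrative example (hypothetical, not from the sources): with
   MODIFIER == EXPAND_SUM, expanding the address of a[i] for a global
   array A of 4-byte elements may return
     (plus (mult (reg I) (const_int 4)) (symbol_ref "a"))
   directly, instead of emitting the multiply and add and returning a
   pseudo register holding the sum.  */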
8076 expand_expr_real (tree exp, rtx target, machine_mode tmode,
8077 enum expand_modifier modifier, rtx *alt_rtl,
8078 bool inner_reference_p)
8080 rtx ret;
8082 /* Handle ERROR_MARK before anybody tries to access its type. */
8083 if (TREE_CODE (exp) == ERROR_MARK
8084 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8086 ret = CONST0_RTX (tmode);
8087 return ret ? ret : const0_rtx;
8090 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8091 inner_reference_p);
8092 return ret;
8095 /* Try to expand the conditional expression which is represented by
8096 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds
8097 return the rtl reg which represents the result. Otherwise return
8098 NULL_RTX. */
8100 static rtx
8101 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8102 tree treeop1 ATTRIBUTE_UNUSED,
8103 tree treeop2 ATTRIBUTE_UNUSED)
8105 rtx insn;
8106 rtx op00, op01, op1, op2;
8107 enum rtx_code comparison_code;
8108 machine_mode comparison_mode;
8109 gimple *srcstmt;
8110 rtx temp;
8111 tree type = TREE_TYPE (treeop1);
8112 int unsignedp = TYPE_UNSIGNED (type);
8113 machine_mode mode = TYPE_MODE (type);
8114 machine_mode orig_mode = mode;
8115 static bool expanding_cond_expr_using_cmove = false;
8117 /* Conditional move expansion can end up TERing two operands which,
8118 when recursively hitting conditional expressions, can result in
8119 exponential behavior if the cmove expansion ultimately fails.
8120 It's hardly profitable to TER a cmove into a cmove, so avoid doing
8121 that by failing early if we end up recursing. */
8122 if (expanding_cond_expr_using_cmove)
8123 return NULL_RTX;
8125 /* If we cannot do a conditional move on the mode, try doing it
8126 with the promoted mode. */
8127 if (!can_conditionally_move_p (mode))
8129 mode = promote_mode (type, mode, &unsignedp);
8130 if (!can_conditionally_move_p (mode))
8131 return NULL_RTX;
8132 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
8134 else
8135 temp = assign_temp (type, 0, 1);
8137 expanding_cond_expr_using_cmove = true;
8138 start_sequence ();
8139 expand_operands (treeop1, treeop2,
8140 temp, &op1, &op2, EXPAND_NORMAL);
8142 if (TREE_CODE (treeop0) == SSA_NAME
8143 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8145 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8146 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8147 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8148 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8149 comparison_mode = TYPE_MODE (type);
8150 unsignedp = TYPE_UNSIGNED (type);
8151 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8153 else if (COMPARISON_CLASS_P (treeop0))
8155 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8156 enum tree_code cmpcode = TREE_CODE (treeop0);
8157 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8158 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8159 unsignedp = TYPE_UNSIGNED (type);
8160 comparison_mode = TYPE_MODE (type);
8161 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8163 else
8165 op00 = expand_normal (treeop0);
8166 op01 = const0_rtx;
8167 comparison_code = NE;
8168 comparison_mode = GET_MODE (op00);
8169 if (comparison_mode == VOIDmode)
8170 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8172 expanding_cond_expr_using_cmove = false;
8174 if (GET_MODE (op1) != mode)
8175 op1 = gen_lowpart (mode, op1);
8177 if (GET_MODE (op2) != mode)
8178 op2 = gen_lowpart (mode, op2);
8180 /* Try to emit the conditional move. */
8181 insn = emit_conditional_move (temp, comparison_code,
8182 op00, op01, comparison_mode,
8183 op1, op2, mode,
8184 unsignedp);
8186 /* If we could do the conditional move, emit the sequence,
8187 and return. */
8188 if (insn)
8190 rtx_insn *seq = get_insns ();
8191 end_sequence ();
8192 emit_insn (seq);
8193 return convert_modes (orig_mode, mode, temp, 0);
8196 /* Otherwise discard the sequence and fall back to code with
8197 branches. */
8198 end_sequence ();
8199 return NULL_RTX;
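/* Illustrative sketch (an assumption, not from the sources): for
     x = a < b ? c : d;
   on a target where can_conditionally_move_p (SImode) holds, the
   function above emits roughly "x = d; if (a < b) x = c;" as a single
   compare plus conditional-move insn, and returns NULL_RTX (so the
   caller falls back to compare-and-branch) only when
   emit_conditional_move fails.  */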
8203 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8204 enum expand_modifier modifier)
8206 rtx op0, op1, op2, temp;
8207 rtx_code_label *lab;
8208 tree type;
8209 int unsignedp;
8210 machine_mode mode;
8211 scalar_int_mode int_mode;
8212 enum tree_code code = ops->code;
8213 optab this_optab;
8214 rtx subtarget, original_target;
8215 int ignore;
8216 bool reduce_bit_field;
8217 location_t loc = ops->location;
8218 tree treeop0, treeop1, treeop2;
8219 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8220 ? reduce_to_bit_field_precision ((expr), \
8221 target, \
8222 type) \
8223 : (expr))
8225 type = ops->type;
8226 mode = TYPE_MODE (type);
8227 unsignedp = TYPE_UNSIGNED (type);
8229 treeop0 = ops->op0;
8230 treeop1 = ops->op1;
8231 treeop2 = ops->op2;
8233 /* We should be called only on simple (binary or unary) expressions,
8234 exactly those that are valid in gimple expressions that aren't
8235 GIMPLE_SINGLE_RHS (or invalid). */
8236 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8237 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8238 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8240 ignore = (target == const0_rtx
8241 || ((CONVERT_EXPR_CODE_P (code)
8242 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8243 && TREE_CODE (type) == VOID_TYPE));
8245 /* We should be called only if we need the result. */
8246 gcc_assert (!ignore);
8248 /* An operation in what may be a bit-field type needs the
8249 result to be reduced to the precision of the bit-field type,
8250 which is narrower than that of the type's mode. */
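/* Illustrative example (not from the sources): for an unsigned bit-field
   type with TYPE_PRECISION 3 whose mode is QImode, 7 + 1 computed in
   QImode is 8; REDUCE_BIT_FIELD masks the result back down to 3 bits so
   the value actually produced is 0.  */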
8251 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8252 && !type_has_mode_precision_p (type));
8254 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8255 target = 0;
8257 /* Use subtarget as the target for operand 0 of a binary operation. */
8258 subtarget = get_subtarget (target);
8259 original_target = target;
8261 switch (code)
8263 case NON_LVALUE_EXPR:
8264 case PAREN_EXPR:
8265 CASE_CONVERT:
8266 if (treeop0 == error_mark_node)
8267 return const0_rtx;
8269 if (TREE_CODE (type) == UNION_TYPE)
8271 tree valtype = TREE_TYPE (treeop0);
8273 /* If both input and output are BLKmode, this conversion isn't doing
8274 anything except possibly changing memory attribute. */
8275 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8277 rtx result = expand_expr (treeop0, target, tmode,
8278 modifier);
8280 result = copy_rtx (result);
8281 set_mem_attributes (result, type, 0);
8282 return result;
8285 if (target == 0)
8287 if (TYPE_MODE (type) != BLKmode)
8288 target = gen_reg_rtx (TYPE_MODE (type));
8289 else
8290 target = assign_temp (type, 1, 1);
8293 if (MEM_P (target))
8294 /* Store data into beginning of memory target. */
8295 store_expr (treeop0,
8296 adjust_address (target, TYPE_MODE (valtype), 0),
8297 modifier == EXPAND_STACK_PARM,
8298 false, TYPE_REVERSE_STORAGE_ORDER (type));
8300 else
8302 gcc_assert (REG_P (target)
8303 && !TYPE_REVERSE_STORAGE_ORDER (type));
8305 /* Store this field into a union of the proper type. */
8306 store_field (target,
8307 MIN ((int_size_in_bytes (TREE_TYPE
8308 (treeop0))
8309 * BITS_PER_UNIT),
8310 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8311 0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8312 false, false);
8315 /* Return the entire union. */
8316 return target;
8319 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8321 op0 = expand_expr (treeop0, target, VOIDmode,
8322 modifier);
8324 /* If the signedness of the conversion differs and OP0 is
8325 a promoted SUBREG, clear that indication since we now
8326 have to do the proper extension. */
8327 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8328 && GET_CODE (op0) == SUBREG)
8329 SUBREG_PROMOTED_VAR_P (op0) = 0;
8331 return REDUCE_BIT_FIELD (op0);
8334 op0 = expand_expr (treeop0, NULL_RTX, mode,
8335 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8336 if (GET_MODE (op0) == mode)
8339 /* If OP0 is a constant, just convert it into the proper mode. */
8340 else if (CONSTANT_P (op0))
8342 tree inner_type = TREE_TYPE (treeop0);
8343 machine_mode inner_mode = GET_MODE (op0);
8345 if (inner_mode == VOIDmode)
8346 inner_mode = TYPE_MODE (inner_type);
8348 if (modifier == EXPAND_INITIALIZER)
8349 op0 = lowpart_subreg (mode, op0, inner_mode);
8350 else
8351 op0 = convert_modes (mode, inner_mode, op0,
8352 TYPE_UNSIGNED (inner_type));
8355 else if (modifier == EXPAND_INITIALIZER)
8356 op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8357 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8359 else if (target == 0)
8360 op0 = convert_to_mode (mode, op0,
8361 TYPE_UNSIGNED (TREE_TYPE
8362 (treeop0)));
8363 else
8365 convert_move (target, op0,
8366 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8367 op0 = target;
8370 return REDUCE_BIT_FIELD (op0);
8372 case ADDR_SPACE_CONVERT_EXPR:
8374 tree treeop0_type = TREE_TYPE (treeop0);
8376 gcc_assert (POINTER_TYPE_P (type));
8377 gcc_assert (POINTER_TYPE_P (treeop0_type));
8379 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8380 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8382 /* Conversions between pointers to the same address space should
8383 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8384 gcc_assert (as_to != as_from);
8386 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8388 /* Ask target code to handle conversion between pointers
8389 to overlapping address spaces. */
8390 if (targetm.addr_space.subset_p (as_to, as_from)
8391 || targetm.addr_space.subset_p (as_from, as_to))
8393 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8395 else
8397 /* For disjoint address spaces, converting anything but a null
8398 pointer invokes undefined behavior. We truncate or extend the
8399 value as if we'd converted via integers, which handles 0 as
8400 required, and all others as the programmer likely expects. */
8401 #ifndef POINTERS_EXTEND_UNSIGNED
8402 const int POINTERS_EXTEND_UNSIGNED = 1;
8403 #endif
8404 op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8405 op0, POINTERS_EXTEND_UNSIGNED);
8407 gcc_assert (op0);
8408 return op0;
8411 case POINTER_PLUS_EXPR:
8412 /* Even though the sizetype mode and the pointer's mode can be different,
8413 expand is able to handle this correctly and get the correct result out
8414 of the PLUS_EXPR code. */
8415 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8416 if sizetype precision is smaller than pointer precision. */
8417 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8418 treeop1 = fold_convert_loc (loc, type,
8419 fold_convert_loc (loc, ssizetype,
8420 treeop1));
8421 /* If sizetype precision is larger than pointer precision, truncate the
8422 offset to have matching modes. */
8423 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8424 treeop1 = fold_convert_loc (loc, type, treeop1);
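/* Illustrative example (an assumption, not from the sources): if sizetype
   is 32 bits but pointers are 64 bits, an offset of (sizetype) -4 must be
   sign-extended via ssizetype so that P + (sizetype) -4 subtracts 4
   instead of adding 0xfffffffc.  */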
8425 /* FALLTHRU */
8427 case PLUS_EXPR:
8428 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8429 something else, make sure we add the register to the constant and
8430 then to the other thing. This case can occur during strength
8431 reduction and doing it this way will produce better code if the
8432 frame pointer or argument pointer is eliminated.
8434 fold-const.c will ensure that the constant is always in the inner
8435 PLUS_EXPR, so the only case we need to do anything about is if
8436 sp, ap, or fp is our second argument, in which case we must swap
8437 the innermost first argument and our second argument. */
8439 if (TREE_CODE (treeop0) == PLUS_EXPR
8440 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8441 && VAR_P (treeop1)
8442 && (DECL_RTL (treeop1) == frame_pointer_rtx
8443 || DECL_RTL (treeop1) == stack_pointer_rtx
8444 || DECL_RTL (treeop1) == arg_pointer_rtx))
8446 gcc_unreachable ();
8449 /* If the result is to be ptr_mode and we are adding an integer to
8450 something, we might be forming a constant. So try to use
8451 plus_constant. If it produces a sum and we can't accept it,
8452 use force_operand. This allows P = &ARR[const] to generate
8453 efficient code on machines where a SYMBOL_REF is not a valid
8454 address.
8456 If this is an EXPAND_SUM call, always return the sum. */
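/* Illustrative example (not from the sources): for P = &ARR[10] with
   4-byte elements and ARR a global array, the constant branches below
   fold the offset with plus_constant, yielding something like
     (const (plus (symbol_ref "ARR") (const_int 40)))
   rather than emitting an add instruction.  */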
8457 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8458 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8460 if (modifier == EXPAND_STACK_PARM)
8461 target = 0;
8462 if (TREE_CODE (treeop0) == INTEGER_CST
8463 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8464 && TREE_CONSTANT (treeop1))
8466 rtx constant_part;
8467 HOST_WIDE_INT wc;
8468 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8470 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8471 EXPAND_SUM);
8472 /* Use wi::shwi to ensure that the constant is
8473 truncated according to the mode of OP1, then sign extended
8474 to a HOST_WIDE_INT. Using the constant directly can result
8475 in non-canonical RTL in a 64x32 cross compile. */
8476 wc = TREE_INT_CST_LOW (treeop0);
8477 constant_part =
8478 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8479 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8480 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8481 op1 = force_operand (op1, target);
8482 return REDUCE_BIT_FIELD (op1);
8485 else if (TREE_CODE (treeop1) == INTEGER_CST
8486 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8487 && TREE_CONSTANT (treeop0))
8489 rtx constant_part;
8490 HOST_WIDE_INT wc;
8491 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8493 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8494 (modifier == EXPAND_INITIALIZER
8495 ? EXPAND_INITIALIZER : EXPAND_SUM));
8496 if (! CONSTANT_P (op0))
8498 op1 = expand_expr (treeop1, NULL_RTX,
8499 VOIDmode, modifier);
8500 /* Return a PLUS if modifier says it's OK. */
8501 if (modifier == EXPAND_SUM
8502 || modifier == EXPAND_INITIALIZER)
8503 return simplify_gen_binary (PLUS, mode, op0, op1);
8504 goto binop2;
8506 /* Use wi::shwi to ensure that the constant is
8507 truncated according to the mode of OP1, then sign extended
8508 to a HOST_WIDE_INT. Using the constant directly can result
8509 in non-canonical RTL in a 64x32 cross compile. */
8510 wc = TREE_INT_CST_LOW (treeop1);
8511 constant_part
8512 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8513 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8514 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8515 op0 = force_operand (op0, target);
8516 return REDUCE_BIT_FIELD (op0);
8520 /* Use TER to expand pointer addition of a negated value
8521 as pointer subtraction. */
8522 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8523 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8524 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8525 && TREE_CODE (treeop1) == SSA_NAME
8526 && TYPE_MODE (TREE_TYPE (treeop0))
8527 == TYPE_MODE (TREE_TYPE (treeop1)))
8529 gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8530 if (def)
8532 treeop1 = gimple_assign_rhs1 (def);
8533 code = MINUS_EXPR;
8534 goto do_minus;
8538 /* No sense saving up arithmetic to be done
8539 if it's all in the wrong mode to form part of an address.
8540 And force_operand won't know whether to sign-extend or
8541 zero-extend. */
8542 if (modifier != EXPAND_INITIALIZER
8543 && (modifier != EXPAND_SUM || mode != ptr_mode))
8545 expand_operands (treeop0, treeop1,
8546 subtarget, &op0, &op1, modifier);
8547 if (op0 == const0_rtx)
8548 return op1;
8549 if (op1 == const0_rtx)
8550 return op0;
8551 goto binop2;
8554 expand_operands (treeop0, treeop1,
8555 subtarget, &op0, &op1, modifier);
8556 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8558 case MINUS_EXPR:
8559 do_minus:
8560 /* For initializers, we are allowed to return a MINUS of two
8561 symbolic constants. Here we handle all cases when both operands
8562 are constant. */
8563 /* Handle difference of two symbolic constants,
8564 for the sake of an initializer. */
8565 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8566 && really_constant_p (treeop0)
8567 && really_constant_p (treeop1))
8569 expand_operands (treeop0, treeop1,
8570 NULL_RTX, &op0, &op1, modifier);
8572 /* If the last operand is a CONST_INT, use plus_constant of
8573 the negated constant. Else make the MINUS. */
8574 if (CONST_INT_P (op1))
8575 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8576 -INTVAL (op1)));
8577 else
8578 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8581 /* No sense saving up arithmetic to be done
8582 if it's all in the wrong mode to form part of an address.
8583 And force_operand won't know whether to sign-extend or
8584 zero-extend. */
8585 if (modifier != EXPAND_INITIALIZER
8586 && (modifier != EXPAND_SUM || mode != ptr_mode))
8587 goto binop;
8589 expand_operands (treeop0, treeop1,
8590 subtarget, &op0, &op1, modifier);
8592 /* Convert A - const to A + (-const). */
8593 if (CONST_INT_P (op1))
8595 op1 = negate_rtx (mode, op1);
8596 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8599 goto binop2;
8601 case WIDEN_MULT_PLUS_EXPR:
8602 case WIDEN_MULT_MINUS_EXPR:
8603 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8604 op2 = expand_normal (treeop2);
8605 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8606 target, unsignedp);
8607 return target;
8609 case WIDEN_MULT_EXPR:
8610 /* If first operand is constant, swap them.
8611 Thus the following special case checks need only
8612 check the second operand. */
8613 if (TREE_CODE (treeop0) == INTEGER_CST)
8614 std::swap (treeop0, treeop1);
8616 /* First, check if we have a multiplication of one signed and one
8617 unsigned operand. */
8618 if (TREE_CODE (treeop1) != INTEGER_CST
8619 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8620 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8622 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8623 this_optab = usmul_widen_optab;
8624 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8625 != CODE_FOR_nothing)
8627 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8628 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8629 EXPAND_NORMAL);
8630 else
8631 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8632 EXPAND_NORMAL);
8633 /* op0 and op1 might still be constant, despite the above
8634 != INTEGER_CST check. Handle it. */
8635 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8637 op0 = convert_modes (innermode, mode, op0, true);
8638 op1 = convert_modes (innermode, mode, op1, false);
8639 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8640 target, unsignedp));
8642 goto binop3;
8645 /* Check for a multiplication with matching signedness. */
8646 else if ((TREE_CODE (treeop1) == INTEGER_CST
8647 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8648 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8649 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8651 tree op0type = TREE_TYPE (treeop0);
8652 machine_mode innermode = TYPE_MODE (op0type);
8653 bool zextend_p = TYPE_UNSIGNED (op0type);
8654 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8655 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8657 if (TREE_CODE (treeop0) != INTEGER_CST)
8659 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8660 != CODE_FOR_nothing)
8662 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8663 EXPAND_NORMAL);
8664 /* op0 and op1 might still be constant, despite the above
8665 != INTEGER_CST check. Handle it. */
8666 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8668 widen_mult_const:
8669 op0 = convert_modes (innermode, mode, op0, zextend_p);
8671 op1 = convert_modes (innermode, mode, op1,
8672 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8673 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8674 target,
8675 unsignedp));
8677 temp = expand_widening_mult (mode, op0, op1, target,
8678 unsignedp, this_optab);
8679 return REDUCE_BIT_FIELD (temp);
8681 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8682 != CODE_FOR_nothing
8683 && innermode == word_mode)
8685 rtx htem, hipart;
8686 op0 = expand_normal (treeop0);
8687 if (TREE_CODE (treeop1) == INTEGER_CST)
8688 op1 = convert_modes (innermode, mode,
8689 expand_normal (treeop1),
8690 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8691 else
8692 op1 = expand_normal (treeop1);
8693 /* op0 and op1 might still be constant, despite the above
8694 != INTEGER_CST check. Handle it. */
8695 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8696 goto widen_mult_const;
8697 temp = expand_binop (mode, other_optab, op0, op1, target,
8698 unsignedp, OPTAB_LIB_WIDEN);
8699 hipart = gen_highpart (innermode, temp);
8700 htem = expand_mult_highpart_adjust (innermode, hipart,
8701 op0, op1, hipart,
8702 zextend_p);
8703 if (htem != hipart)
8704 emit_move_insn (hipart, htem);
8705 return REDUCE_BIT_FIELD (temp);
8709 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8710 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8711 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8712 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8714 case FMA_EXPR:
8716 optab opt = fma_optab;
8717 gimple *def0, *def2;
8719 /* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8720 call. */
8721 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8723 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8724 tree call_expr;
8726 gcc_assert (fn != NULL_TREE);
8727 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8728 return expand_builtin (call_expr, target, subtarget, mode, false);
8731 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8732 /* The multiplication is commutative - look at its 2nd operand
8733 if the first isn't fed by a negate. */
8734 if (!def0)
8736 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8737 /* Swap operands if the 2nd operand is fed by a negate. */
8738 if (def0)
8739 std::swap (treeop0, treeop1);
8741 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8743 op0 = op2 = NULL;
8745 if (def0 && def2
8746 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8748 opt = fnms_optab;
8749 op0 = expand_normal (gimple_assign_rhs1 (def0));
8750 op2 = expand_normal (gimple_assign_rhs1 (def2));
8752 else if (def0
8753 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8755 opt = fnma_optab;
8756 op0 = expand_normal (gimple_assign_rhs1 (def0));
8758 else if (def2
8759 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8761 opt = fms_optab;
8762 op2 = expand_normal (gimple_assign_rhs1 (def2));
8765 if (op0 == NULL)
8766 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8767 if (op2 == NULL)
8768 op2 = expand_normal (treeop2);
8769 op1 = expand_normal (treeop1);
8771 return expand_ternary_op (TYPE_MODE (type), opt,
8772 op0, op1, op2, target, 0);
8775 case MULT_EXPR:
8776 /* If this is a fixed-point operation, then we cannot use the code
8777 below because "expand_mult" doesn't support sat/no-sat fixed-point
8778 multiplications. */
8779 if (ALL_FIXED_POINT_MODE_P (mode))
8780 goto binop;
8782 /* If first operand is constant, swap them.
8783 Thus the following special case checks need only
8784 check the second operand. */
8785 if (TREE_CODE (treeop0) == INTEGER_CST)
8786 std::swap (treeop0, treeop1);
8788 /* Attempt to return something suitable for generating an
8789 indexed address, for machines that support that. */
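/* Illustrative example (an assumption, not from the sources): when
   expanding I * 4 with EXPAND_SUM as part of an address, returning
     (mult (reg I) (const_int 4))
   lets the caller fold it into an indexed address such as
     (plus (reg BASE) (mult (reg I) (const_int 4)))
   on targets whose addressing modes support scaled indices.  */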
8791 if (modifier == EXPAND_SUM && mode == ptr_mode
8792 && tree_fits_shwi_p (treeop1))
8794 tree exp1 = treeop1;
8796 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8797 EXPAND_SUM);
8799 if (!REG_P (op0))
8800 op0 = force_operand (op0, NULL_RTX);
8801 if (!REG_P (op0))
8802 op0 = copy_to_mode_reg (mode, op0);
8804 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8805 gen_int_mode (tree_to_shwi (exp1),
8806 TYPE_MODE (TREE_TYPE (exp1)))));
8809 if (modifier == EXPAND_STACK_PARM)
8810 target = 0;
8812 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8813 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8815 case TRUNC_MOD_EXPR:
8816 case FLOOR_MOD_EXPR:
8817 case CEIL_MOD_EXPR:
8818 case ROUND_MOD_EXPR:
8820 case TRUNC_DIV_EXPR:
8821 case FLOOR_DIV_EXPR:
8822 case CEIL_DIV_EXPR:
8823 case ROUND_DIV_EXPR:
8824 case EXACT_DIV_EXPR:
8826 /* If this is a fixed-point operation, then we cannot use the code
8827 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8828 divisions. */
8829 if (ALL_FIXED_POINT_MODE_P (mode))
8830 goto binop;
8832 if (modifier == EXPAND_STACK_PARM)
8833 target = 0;
8834 /* Possible optimization: compute the dividend with EXPAND_SUM;
8835 then, if the divisor is constant, we can optimize the case
8836 where some terms of the dividend have coefficients divisible by it. */
8837 expand_operands (treeop0, treeop1,
8838 subtarget, &op0, &op1, EXPAND_NORMAL);
8839 bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
8840 || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
8841 if (SCALAR_INT_MODE_P (mode)
8842 && optimize >= 2
8843 && get_range_pos_neg (treeop0) == 1
8844 && get_range_pos_neg (treeop1) == 1)
8846 /* If both arguments are known to be positive when interpreted
8847 as signed, we can expand it as both signed and unsigned
8848 division or modulo. Choose the cheaper sequence in that case. */
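/* Illustrative example (an assumption, not from the sources): for
     int r = x % 8;
   where x is known to be non-negative, the unsigned sequence is
   typically a single AND (r = x & 7), while the signed sequence needs
   extra fix-up instructions, so the cost comparison below selects the
   unsigned expansion.  */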
8849 bool speed_p = optimize_insn_for_speed_p ();
8850 do_pending_stack_adjust ();
8851 start_sequence ();
8852 rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
8853 rtx_insn *uns_insns = get_insns ();
8854 end_sequence ();
8855 start_sequence ();
8856 rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
8857 rtx_insn *sgn_insns = get_insns ();
8858 end_sequence ();
8859 unsigned uns_cost = seq_cost (uns_insns, speed_p);
8860 unsigned sgn_cost = seq_cost (sgn_insns, speed_p);
8862 /* If the costs are the same then use the other cost factor
8863 as a tie breaker. */
8864 if (uns_cost == sgn_cost)
8866 uns_cost = seq_cost (uns_insns, !speed_p);
8867 sgn_cost = seq_cost (sgn_insns, !speed_p);
8870 if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
8872 emit_insn (uns_insns);
8873 return uns_ret;
8875 emit_insn (sgn_insns);
8876 return sgn_ret;
8878 return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
8880 case RDIV_EXPR:
8881 goto binop;
8883 case MULT_HIGHPART_EXPR:
8884 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8885 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8886 gcc_assert (temp);
8887 return temp;
8889 case FIXED_CONVERT_EXPR:
8890 op0 = expand_normal (treeop0);
8891 if (target == 0 || modifier == EXPAND_STACK_PARM)
8892 target = gen_reg_rtx (mode);
8894 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8895 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8896 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8897 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8898 else
8899 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8900 return target;
8902 case FIX_TRUNC_EXPR:
8903 op0 = expand_normal (treeop0);
8904 if (target == 0 || modifier == EXPAND_STACK_PARM)
8905 target = gen_reg_rtx (mode);
8906 expand_fix (target, op0, unsignedp);
8907 return target;
8909 case FLOAT_EXPR:
8910 op0 = expand_normal (treeop0);
8911 if (target == 0 || modifier == EXPAND_STACK_PARM)
8912 target = gen_reg_rtx (mode);
8913 /* expand_float can't figure out what to do if FROM has VOIDmode.
8914 So give it the correct mode. With -O, cse will optimize this. */
8915 if (GET_MODE (op0) == VOIDmode)
8916 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8917 op0);
8918 expand_float (target, op0,
8919 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8920 return target;
8922 case NEGATE_EXPR:
8923 op0 = expand_expr (treeop0, subtarget,
8924 VOIDmode, EXPAND_NORMAL);
8925 if (modifier == EXPAND_STACK_PARM)
8926 target = 0;
8927 temp = expand_unop (mode,
8928 optab_for_tree_code (NEGATE_EXPR, type,
8929 optab_default),
8930 op0, target, 0);
8931 gcc_assert (temp);
8932 return REDUCE_BIT_FIELD (temp);
8934 case ABS_EXPR:
8935 op0 = expand_expr (treeop0, subtarget,
8936 VOIDmode, EXPAND_NORMAL);
8937 if (modifier == EXPAND_STACK_PARM)
8938 target = 0;
8940 /* ABS_EXPR is not valid for complex arguments. */
8941 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8942 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8944 /* Unsigned abs is simply the operand. Testing here means we don't
8945 risk generating incorrect code below. */
8946 if (TYPE_UNSIGNED (type))
8947 return op0;
8949 return expand_abs (mode, op0, target, unsignedp,
8950 safe_from_p (target, treeop0, 1));
8952 case MAX_EXPR:
8953 case MIN_EXPR:
8954 target = original_target;
8955 if (target == 0
8956 || modifier == EXPAND_STACK_PARM
8957 || (MEM_P (target) && MEM_VOLATILE_P (target))
8958 || GET_MODE (target) != mode
8959 || (REG_P (target)
8960 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8961 target = gen_reg_rtx (mode);
8962 expand_operands (treeop0, treeop1,
8963 target, &op0, &op1, EXPAND_NORMAL);
8965 /* First try to do it with a special MIN or MAX instruction.
8966 If that does not win, use a conditional jump to select the proper
8967 value. */
8968 this_optab = optab_for_tree_code (code, type, optab_default);
8969 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8970 OPTAB_WIDEN);
8971 if (temp != 0)
8972 return temp;
8974 /* For vector MIN <x, y>, expand it as a VEC_COND_EXPR <x <= y, x, y>
8975 and similarly for MAX <x, y>. */
8976 if (VECTOR_TYPE_P (type))
8978 tree t0 = make_tree (type, op0);
8979 tree t1 = make_tree (type, op1);
8980 tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
8981 type, t0, t1);
8982 return expand_vec_cond_expr (type, comparison, t0, t1,
8983 original_target);
8986 /* At this point, a MEM target is no longer useful; we will get better
8987 code without it. */
8989 if (! REG_P (target))
8990 target = gen_reg_rtx (mode);
8992 /* If op1 was placed in target, swap op0 and op1. */
8993 if (target != op0 && target == op1)
8994 std::swap (op0, op1);
8996 /* We generate better code and avoid problems with op1 mentioning
8997 target by forcing op1 into a pseudo if it isn't a constant. */
8998 if (! CONSTANT_P (op1))
8999 op1 = force_reg (mode, op1);
9002 enum rtx_code comparison_code;
9003 rtx cmpop1 = op1;
9005 if (code == MAX_EXPR)
9006 comparison_code = unsignedp ? GEU : GE;
9007 else
9008 comparison_code = unsignedp ? LEU : LE;
9010 /* Canonicalize to comparisons against 0. */
9011 if (op1 == const1_rtx)
9013 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
9014 or (a != 0 ? a : 1) for unsigned.
9015 For MIN we are safe converting (a <= 1 ? a : 1)
9016 into (a <= 0 ? a : 1) */
9017 cmpop1 = const0_rtx;
9018 if (code == MAX_EXPR)
9019 comparison_code = unsignedp ? NE : GT;
9021 if (op1 == constm1_rtx && !unsignedp)
9023 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
9024 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
9025 cmpop1 = const0_rtx;
9026 if (code == MIN_EXPR)
9027 comparison_code = LT;
9030 /* Use a conditional move if possible. */
9031 if (can_conditionally_move_p (mode))
9033 rtx insn;
9035 start_sequence ();
9037 /* Try to emit the conditional move. */
9038 insn = emit_conditional_move (target, comparison_code,
9039 op0, cmpop1, mode,
9040 op0, op1, mode,
9041 unsignedp);
9043 /* If we could do the conditional move, emit the sequence,
9044 and return. */
9045 if (insn)
9047 rtx_insn *seq = get_insns ();
9048 end_sequence ();
9049 emit_insn (seq);
9050 return target;
9053 /* Otherwise discard the sequence and fall back to code with
9054 branches. */
9055 end_sequence ();
9058 if (target != op0)
9059 emit_move_insn (target, op0);
9061 lab = gen_label_rtx ();
9062 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
9063 unsignedp, mode, NULL_RTX, NULL, lab,
9064 profile_probability::uninitialized ());
9066 emit_move_insn (target, op1);
9067 emit_label (lab);
9068 return target;
9070 case BIT_NOT_EXPR:
9071 op0 = expand_expr (treeop0, subtarget,
9072 VOIDmode, EXPAND_NORMAL);
9073 if (modifier == EXPAND_STACK_PARM)
9074 target = 0;
9075 /* In case we have to reduce the result to bit-field precision
9076 for an unsigned bit-field, expand this as an XOR with a proper
9077 constant instead. */
9078 if (reduce_bit_field && TYPE_UNSIGNED (type))
9080 int_mode = SCALAR_INT_TYPE_MODE (type);
9081 wide_int mask = wi::mask (TYPE_PRECISION (type),
9082 false, GET_MODE_PRECISION (int_mode));
9084 temp = expand_binop (int_mode, xor_optab, op0,
9085 immed_wide_int_const (mask, int_mode),
9086 target, 1, OPTAB_LIB_WIDEN);
9088 else
9089 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
9090 gcc_assert (temp);
9091 return temp;
9093 /* ??? Can optimize bitwise operations with one arg constant.
9094 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
9095 and (a bitwise1 b) bitwise2 b (etc)
9096 but that is probably not worthwhile. */
9098 case BIT_AND_EXPR:
9099 case BIT_IOR_EXPR:
9100 case BIT_XOR_EXPR:
9101 goto binop;
9103 case LROTATE_EXPR:
9104 case RROTATE_EXPR:
9105 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
9106 || type_has_mode_precision_p (type));
9107 /* fall through */
9109 case LSHIFT_EXPR:
9110 case RSHIFT_EXPR:
9112 /* If this is a fixed-point operation, then we cannot use the code
9113 below because "expand_shift" doesn't support sat/no-sat fixed-point
9114 shifts. */
9115 if (ALL_FIXED_POINT_MODE_P (mode))
9116 goto binop;
9118 if (! safe_from_p (subtarget, treeop1, 1))
9119 subtarget = 0;
9120 if (modifier == EXPAND_STACK_PARM)
9121 target = 0;
9122 op0 = expand_expr (treeop0, subtarget,
9123 VOIDmode, EXPAND_NORMAL);
9125 /* Left shift optimization when shifting across word_size boundary.
9127 If mode == GET_MODE_WIDER_MODE (word_mode), then normally
9128 there isn't a native instruction to support this wide-mode
9129 left shift. Given the scenario below:
9131 Type A = (Type) B << C
9133 |< T >|
9134 | dest_high | dest_low |
9136 | word_size |
9138 If the shift amount C causes B to be shifted across the word
9139 size boundary, i.e. part of B is shifted into the high half of the
9140 destination register and part of B remains in the low
9141 half, then GCC will use the following left-shift expansion
9142 logic:
9144 1. Initialize dest_low to B.
9145 2. Initialize every bit of dest_high to the sign bit of B.
9146 3. Logically left shift dest_low by C bits to finalize dest_low.
9147 The value of dest_low before this shift is kept in a temp D.
9148 4. Logically left shift dest_high by C.
9149 5. Logically right shift D by (word_size - C).
9150 6. Or the results of 4 and 5 to finalize dest_high.
9152 However, by checking the gimple statements, if operand B
9153 comes from a sign extension, then we can simplify the above
9154 expansion logic into:
9156 1. dest_high = src_low >> (word_size - C).
9157 2. dest_low = src_low << C.
9159 We can use one arithmetic right shift to accomplish all of
9160 steps 2, 4, 5 and 6, thus reducing the number of steps
9161 needed from 6 to 2.
9163 The case is similar for zero extension, except that we
9164 initialize dest_high to zero rather than copies of the sign
9165 bit from B. Furthermore, we need to use a logical right shift
9166 in this case.
9168 The choice of sign-extension versus zero-extension is
9169 determined entirely by whether or not B is signed and is
9170 independent of the current setting of unsignedp. */
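/* Illustrative sketch (not part of expr.c): the simplified expansion in C,
   using 64-bit words to stand in for word_mode and assuming the source was
   sign-extended from a 64-bit value with 0 < c < 64.  It relies on >> being
   an arithmetic shift for signed operands, as on GCC targets; all names are
   invented for illustration.  */
#include <stdint.h>

static void
sketch_wide_lshift (int64_t src_low, unsigned int c,
                    uint64_t *dest_low, uint64_t *dest_high)
{
  /* One arithmetic right shift produces both the bits of src_low that
     cross into the high word and the required copies of the sign bit.  */
  *dest_high = (uint64_t) (src_low >> (64 - c));
  /* dest_low = src_low << C.  */
  *dest_low = (uint64_t) src_low << c;
}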
9172 temp = NULL_RTX;
9173 if (code == LSHIFT_EXPR
9174 && target
9175 && REG_P (target)
9176 && GET_MODE_2XWIDER_MODE (word_mode).exists (&int_mode)
9177 && mode == int_mode
9178 && TREE_CONSTANT (treeop1)
9179 && TREE_CODE (treeop0) == SSA_NAME)
9181 gimple *def = SSA_NAME_DEF_STMT (treeop0);
9182 if (is_gimple_assign (def)
9183 && gimple_assign_rhs_code (def) == NOP_EXPR)
9185 scalar_int_mode rmode = SCALAR_INT_TYPE_MODE
9186 (TREE_TYPE (gimple_assign_rhs1 (def)));
9188 if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (int_mode)
9189 && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
9190 && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
9191 >= GET_MODE_BITSIZE (word_mode)))
9193 rtx_insn *seq, *seq_old;
9194 unsigned int high_off = subreg_highpart_offset (word_mode,
9195 int_mode);
9196 bool extend_unsigned
9197 = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
9198 rtx low = lowpart_subreg (word_mode, op0, int_mode);
9199 rtx dest_low = lowpart_subreg (word_mode, target, int_mode);
9200 rtx dest_high = simplify_gen_subreg (word_mode, target,
9201 int_mode, high_off);
9202 HOST_WIDE_INT ramount = (BITS_PER_WORD
9203 - TREE_INT_CST_LOW (treeop1));
9204 tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
9206 start_sequence ();
9207 /* dest_high = src_low >> (word_size - C). */
9208 temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
9209 rshift, dest_high,
9210 extend_unsigned);
9211 if (temp != dest_high)
9212 emit_move_insn (dest_high, temp);
9214 /* dest_low = src_low << C. */
9215 temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
9216 treeop1, dest_low, unsignedp);
9217 if (temp != dest_low)
9218 emit_move_insn (dest_low, temp);
9220 seq = get_insns ();
9221 end_sequence ();
9222 temp = target;
9224 if (have_insn_for (ASHIFT, int_mode))
9226 bool speed_p = optimize_insn_for_speed_p ();
9227 start_sequence ();
9228 rtx ret_old = expand_variable_shift (code, int_mode,
9229 op0, treeop1,
9230 target,
9231 unsignedp);
9233 seq_old = get_insns ();
9234 end_sequence ();
9235 if (seq_cost (seq, speed_p)
9236 >= seq_cost (seq_old, speed_p))
9238 seq = seq_old;
9239 temp = ret_old;
9242 emit_insn (seq);
9247 if (temp == NULL_RTX)
9248 temp = expand_variable_shift (code, mode, op0, treeop1, target,
9249 unsignedp);
9250 if (code == LSHIFT_EXPR)
9251 temp = REDUCE_BIT_FIELD (temp);
9252 return temp;
9255 /* Could determine the answer when only additive constants differ. Also,
9256 the addition of one can be handled by changing the condition. */
9257 case LT_EXPR:
9258 case LE_EXPR:
9259 case GT_EXPR:
9260 case GE_EXPR:
9261 case EQ_EXPR:
9262 case NE_EXPR:
9263 case UNORDERED_EXPR:
9264 case ORDERED_EXPR:
9265 case UNLT_EXPR:
9266 case UNLE_EXPR:
9267 case UNGT_EXPR:
9268 case UNGE_EXPR:
9269 case UNEQ_EXPR:
9270 case LTGT_EXPR:
9272 temp = do_store_flag (ops,
9273 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9274 tmode != VOIDmode ? tmode : mode);
9275 if (temp)
9276 return temp;
9278 /* Use a compare and a jump for BLKmode comparisons, or for function
9279 type comparisons if have_canonicalize_funcptr_for_compare. */
9281 if ((target == 0
9282 || modifier == EXPAND_STACK_PARM
9283 || ! safe_from_p (target, treeop0, 1)
9284 || ! safe_from_p (target, treeop1, 1)
9285 /* Make sure we don't have a hard reg (such as function's return
9286 value) live across basic blocks, if not optimizing. */
9287 || (!optimize && REG_P (target)
9288 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9289 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9291 emit_move_insn (target, const0_rtx);
9293 rtx_code_label *lab1 = gen_label_rtx ();
9294 jumpifnot_1 (code, treeop0, treeop1, lab1,
9295 profile_probability::uninitialized ());
9297 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9298 emit_move_insn (target, constm1_rtx);
9299 else
9300 emit_move_insn (target, const1_rtx);
9302 emit_label (lab1);
9303 return target;
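/* Illustrative sketch (not part of expr.c): when do_store_flag cannot
   produce the comparison result directly, the compare-and-jump fallback
   above behaves like the following C, except that -1 is stored instead of
   1 for a signed 1-bit result type.  Names are invented for
   illustration.  */
static int
sketch_compare_via_jump (int a, int b)
{
  int target = 0;      /* emit_move_insn (target, const0_rtx)  */
  if (a < b)           /* jumpifnot_1 branches over the store otherwise  */
    target = 1;        /* emit_move_insn (target, const1_rtx)  */
  return target;
}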
9305 case COMPLEX_EXPR:
9306 /* Get the rtx code of the operands. */
9307 op0 = expand_normal (treeop0);
9308 op1 = expand_normal (treeop1);
9310 if (!target)
9311 target = gen_reg_rtx (TYPE_MODE (type));
9312 else
9313 /* If target overlaps with op1, then either we need to force
9314 op1 into a pseudo (if target also overlaps with op0),
9315 or write the complex parts in reverse order. */
9316 switch (GET_CODE (target))
9318 case CONCAT:
9319 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9321 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9323 complex_expr_force_op1:
9324 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9325 emit_move_insn (temp, op1);
9326 op1 = temp;
9327 break;
9329 complex_expr_swap_order:
9330 /* Move the imaginary (op1) and real (op0) parts to their
9331 location. */
9332 write_complex_part (target, op1, true);
9333 write_complex_part (target, op0, false);
9335 return target;
9337 break;
9338 case MEM:
9339 temp = adjust_address_nv (target,
9340 GET_MODE_INNER (GET_MODE (target)), 0);
9341 if (reg_overlap_mentioned_p (temp, op1))
9343 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9344 temp = adjust_address_nv (target, imode,
9345 GET_MODE_SIZE (imode));
9346 if (reg_overlap_mentioned_p (temp, op0))
9347 goto complex_expr_force_op1;
9348 goto complex_expr_swap_order;
9350 break;
9351 default:
9352 if (reg_overlap_mentioned_p (target, op1))
9354 if (reg_overlap_mentioned_p (target, op0))
9355 goto complex_expr_force_op1;
9356 goto complex_expr_swap_order;
9358 break;
9361 /* Move the real (op0) and imaginary (op1) parts to their location. */
9362 write_complex_part (target, op0, false);
9363 write_complex_part (target, op1, true);
9365 return target;
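/* Illustrative sketch (not part of expr.c): why the order of the two
   write_complex_part calls matters when the target overlaps an operand.
   Pointers to doubles stand in for the parts of a CONCAT target; names are
   invented for illustration.  */
static void
sketch_complex_swap_order (double *target_re, double *target_im,
                           double op0, double op1)
{
  /* If op1 had been read from *target_re, storing the real part first
     would clobber op1 before it reaches the imaginary slot.  Writing the
     imaginary part first (the swap-order path above) avoids that; when
     both parts overlap, op1 is copied into a temporary instead.  */
  *target_im = op1;
  *target_re = op0;
}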
9367 case WIDEN_SUM_EXPR:
9369 tree oprnd0 = treeop0;
9370 tree oprnd1 = treeop1;
9372 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9373 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9374 target, unsignedp);
9375 return target;
9378 case REDUC_MAX_EXPR:
9379 case REDUC_MIN_EXPR:
9380 case REDUC_PLUS_EXPR:
9382 op0 = expand_normal (treeop0);
9383 this_optab = optab_for_tree_code (code, type, optab_default);
9384 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9386 struct expand_operand ops[2];
9387 enum insn_code icode = optab_handler (this_optab, vec_mode);
9389 create_output_operand (&ops[0], target, mode);
9390 create_input_operand (&ops[1], op0, vec_mode);
9391 expand_insn (icode, 2, ops);
9392 target = ops[0].value;
9393 if (GET_MODE (target) != mode)
9394 return gen_lowpart (tmode, target);
9395 return target;
9398 case VEC_UNPACK_HI_EXPR:
9399 case VEC_UNPACK_LO_EXPR:
9401 op0 = expand_normal (treeop0);
9402 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9403 target, unsignedp);
9404 gcc_assert (temp);
9405 return temp;
9408 case VEC_UNPACK_FLOAT_HI_EXPR:
9409 case VEC_UNPACK_FLOAT_LO_EXPR:
9411 op0 = expand_normal (treeop0);
9412 /* The signedness is determined from the input operand. */
9413 temp = expand_widen_pattern_expr
9414 (ops, op0, NULL_RTX, NULL_RTX,
9415 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9417 gcc_assert (temp);
9418 return temp;
9421 case VEC_WIDEN_MULT_HI_EXPR:
9422 case VEC_WIDEN_MULT_LO_EXPR:
9423 case VEC_WIDEN_MULT_EVEN_EXPR:
9424 case VEC_WIDEN_MULT_ODD_EXPR:
9425 case VEC_WIDEN_LSHIFT_HI_EXPR:
9426 case VEC_WIDEN_LSHIFT_LO_EXPR:
9427 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9428 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9429 target, unsignedp);
9430 gcc_assert (target);
9431 return target;
9433 case VEC_PACK_TRUNC_EXPR:
9434 case VEC_PACK_SAT_EXPR:
9435 case VEC_PACK_FIX_TRUNC_EXPR:
9436 mode = TYPE_MODE (TREE_TYPE (treeop0));
9437 goto binop;
9439 case VEC_PERM_EXPR:
9440 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9441 op2 = expand_normal (treeop2);
9443 /* Careful here: if the target doesn't support integral vector modes,
9444 a constant selection vector could wind up smooshed into a normal
9445 integral constant. */
9446 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9448 tree sel_type = TREE_TYPE (treeop2);
9449 machine_mode vmode
9450 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9451 TYPE_VECTOR_SUBPARTS (sel_type));
9452 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9453 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9454 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9456 else
9457 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9459 temp = expand_vec_perm (mode, op0, op1, op2, target);
9460 gcc_assert (temp);
9461 return temp;
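/* Illustrative sketch (not part of expr.c): the element-wise semantics
   that expand_vec_perm implements for VEC_PERM_EXPR -- each selector
   element indexes the concatenation of the two input vectors, taken
   modulo twice the element count.  Names are invented for
   illustration.  */
static void
sketch_vec_perm (const int *v0, const int *v1, const unsigned int *sel,
                 int *result, unsigned int nelts)
{
  for (unsigned int i = 0; i < nelts; i++)
    {
      unsigned int s = sel[i] % (2 * nelts);
      result[i] = s < nelts ? v0[s] : v1[s - nelts];
    }
}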
9463 case DOT_PROD_EXPR:
9465 tree oprnd0 = treeop0;
9466 tree oprnd1 = treeop1;
9467 tree oprnd2 = treeop2;
9468 rtx op2;
9470 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9471 op2 = expand_normal (oprnd2);
9472 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9473 target, unsignedp);
9474 return target;
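/* Illustrative sketch (not part of expr.c): the scalar meaning of
   DOT_PROD_EXPR as expanded by the widening pattern above -- multiply
   pairs of narrow elements in the wider accumulator type and sum them
   into the accumulator operand.  SAD_EXPR below is analogous with
   absolute differences in place of products.  Shown for signed char
   inputs and an int accumulator; names are invented for illustration.  */
static int
sketch_dot_prod (const signed char *a, const signed char *b,
                 unsigned int n, int acc)
{
  for (unsigned int i = 0; i < n; i++)
    acc += (int) a[i] * (int) b[i];    /* widening multiply-accumulate  */
  return acc;
}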
9477 case SAD_EXPR:
9479 tree oprnd0 = treeop0;
9480 tree oprnd1 = treeop1;
9481 tree oprnd2 = treeop2;
9482 rtx op2;
9484 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9485 op2 = expand_normal (oprnd2);
9486 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9487 target, unsignedp);
9488 return target;
9491 case REALIGN_LOAD_EXPR:
9493 tree oprnd0 = treeop0;
9494 tree oprnd1 = treeop1;
9495 tree oprnd2 = treeop2;
9496 rtx op2;
9498 this_optab = optab_for_tree_code (code, type, optab_default);
9499 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9500 op2 = expand_normal (oprnd2);
9501 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9502 target, unsignedp);
9503 gcc_assert (temp);
9504 return temp;
9507 case COND_EXPR:
9509 /* A COND_EXPR with its type being VOID_TYPE represents a
9510 conditional jump and is handled in
9511 expand_gimple_cond_expr. */
9512 gcc_assert (!VOID_TYPE_P (type));
9514 /* Note that COND_EXPRs whose type is a structure or union
9515 are required to be constructed to contain assignments of
9516 a temporary variable, so that we can evaluate them here
9517 for side effect only. If type is void, we must do likewise. */
9519 gcc_assert (!TREE_ADDRESSABLE (type)
9520 && !ignore
9521 && TREE_TYPE (treeop1) != void_type_node
9522 && TREE_TYPE (treeop2) != void_type_node);
9524 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9525 if (temp)
9526 return temp;
9528 /* If we are not to produce a result, we have no target. Otherwise,
9529 if a target was specified use it; it will not be used as an
9530 intermediate target unless it is safe. If no target, use a
9531 temporary. */
9533 if (modifier != EXPAND_STACK_PARM
9534 && original_target
9535 && safe_from_p (original_target, treeop0, 1)
9536 && GET_MODE (original_target) == mode
9537 && !MEM_P (original_target))
9538 temp = original_target;
9539 else
9540 temp = assign_temp (type, 0, 1);
9542 do_pending_stack_adjust ();
9543 NO_DEFER_POP;
9544 rtx_code_label *lab0 = gen_label_rtx ();
9545 rtx_code_label *lab1 = gen_label_rtx ();
9546 jumpifnot (treeop0, lab0,
9547 profile_probability::uninitialized ());
9548 store_expr (treeop1, temp,
9549 modifier == EXPAND_STACK_PARM,
9550 false, false);
9552 emit_jump_insn (targetm.gen_jump (lab1));
9553 emit_barrier ();
9554 emit_label (lab0);
9555 store_expr (treeop2, temp,
9556 modifier == EXPAND_STACK_PARM,
9557 false, false);
9559 emit_label (lab1);
9560 OK_DEFER_POP;
9561 return temp;
9564 case VEC_COND_EXPR:
9565 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9566 return target;
9568 case BIT_INSERT_EXPR:
9570 unsigned bitpos = tree_to_uhwi (treeop2);
9571 unsigned bitsize;
9572 if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
9573 bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
9574 else
9575 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
9576 rtx op0 = expand_normal (treeop0);
9577 rtx op1 = expand_normal (treeop1);
9578 rtx dst = gen_reg_rtx (mode);
9579 emit_move_insn (dst, op0);
9580 store_bit_field (dst, bitsize, bitpos, 0, 0,
9581 TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
9582 return dst;
9585 default:
9586 gcc_unreachable ();
9589 /* Here to do an ordinary binary operator. */
9590 binop:
9591 expand_operands (treeop0, treeop1,
9592 subtarget, &op0, &op1, EXPAND_NORMAL);
9593 binop2:
9594 this_optab = optab_for_tree_code (code, type, optab_default);
9595 binop3:
9596 if (modifier == EXPAND_STACK_PARM)
9597 target = 0;
9598 temp = expand_binop (mode, this_optab, op0, op1, target,
9599 unsignedp, OPTAB_LIB_WIDEN);
9600 gcc_assert (temp);
9601 /* Bitwise operations do not need bitfield reduction as we expect their
9602 operands to be properly truncated. */
9603 if (code == BIT_XOR_EXPR
9604 || code == BIT_AND_EXPR
9605 || code == BIT_IOR_EXPR)
9606 return temp;
9607 return REDUCE_BIT_FIELD (temp);
9609 #undef REDUCE_BIT_FIELD
9612 /* Return TRUE if expression STMT is suitable for replacement.
9613 Never consider memory loads as replaceable, because those never lead
9614 to constant expressions. */
9616 static bool
9617 stmt_is_replaceable_p (gimple *stmt)
9619 if (ssa_is_replaceable_p (stmt))
9621 /* Don't move around loads. */
9622 if (!gimple_assign_single_p (stmt)
9623 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9624 return true;
9626 return false;
9630 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9631 enum expand_modifier modifier, rtx *alt_rtl,
9632 bool inner_reference_p)
9634 rtx op0, op1, temp, decl_rtl;
9635 tree type;
9636 int unsignedp;
9637 machine_mode mode, dmode;
9638 enum tree_code code = TREE_CODE (exp);
9639 rtx subtarget, original_target;
9640 int ignore;
9641 tree context;
9642 bool reduce_bit_field;
9643 location_t loc = EXPR_LOCATION (exp);
9644 struct separate_ops ops;
9645 tree treeop0, treeop1, treeop2;
9646 tree ssa_name = NULL_TREE;
9647 gimple *g;
9649 type = TREE_TYPE (exp);
9650 mode = TYPE_MODE (type);
9651 unsignedp = TYPE_UNSIGNED (type);
9653 treeop0 = treeop1 = treeop2 = NULL_TREE;
9654 if (!VL_EXP_CLASS_P (exp))
9655 switch (TREE_CODE_LENGTH (code))
9657 default:
9658 case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
9659 case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
9660 case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
9661 case 0: break;
9663 ops.code = code;
9664 ops.type = type;
9665 ops.op0 = treeop0;
9666 ops.op1 = treeop1;
9667 ops.op2 = treeop2;
9668 ops.location = loc;
9670 ignore = (target == const0_rtx
9671 || ((CONVERT_EXPR_CODE_P (code)
9672 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9673 && TREE_CODE (type) == VOID_TYPE));
9675 /* An operation in what may be a bit-field type needs the
9676 result to be reduced to the precision of the bit-field type,
9677 which is narrower than that of the type's mode. */
9678 reduce_bit_field = (!ignore
9679 && INTEGRAL_TYPE_P (type)
9680 && !type_has_mode_precision_p (type));
9682 /* If we are going to ignore this result, we need only do something
9683 if there is a side-effect somewhere in the expression. If there
9684 is, short-circuit the most common cases here. Note that we must
9685 not call expand_expr with anything but const0_rtx in case this
9686 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9688 if (ignore)
9690 if (! TREE_SIDE_EFFECTS (exp))
9691 return const0_rtx;
9693 /* Ensure we reference a volatile object even if value is ignored, but
9694 don't do this if all we are doing is taking its address. */
9695 if (TREE_THIS_VOLATILE (exp)
9696 && TREE_CODE (exp) != FUNCTION_DECL
9697 && mode != VOIDmode && mode != BLKmode
9698 && modifier != EXPAND_CONST_ADDRESS)
9700 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9701 if (MEM_P (temp))
9702 copy_to_reg (temp);
9703 return const0_rtx;
9706 if (TREE_CODE_CLASS (code) == tcc_unary
9707 || code == BIT_FIELD_REF
9708 || code == COMPONENT_REF
9709 || code == INDIRECT_REF)
9710 return expand_expr (treeop0, const0_rtx, VOIDmode,
9711 modifier);
9713 else if (TREE_CODE_CLASS (code) == tcc_binary
9714 || TREE_CODE_CLASS (code) == tcc_comparison
9715 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9717 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9718 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9719 return const0_rtx;
9722 target = 0;
9725 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9726 target = 0;
9728 /* Use subtarget as the target for operand 0 of a binary operation. */
9729 subtarget = get_subtarget (target);
9730 original_target = target;
9732 switch (code)
9734 case LABEL_DECL:
9736 tree function = decl_function_context (exp);
9738 temp = label_rtx (exp);
9739 temp = gen_rtx_LABEL_REF (Pmode, temp);
9741 if (function != current_function_decl
9742 && function != 0)
9743 LABEL_REF_NONLOCAL_P (temp) = 1;
9745 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9746 return temp;
9749 case SSA_NAME:
9750 /* ??? ivopts calls the expander without any preparation from
9751 out-of-ssa. So fake instructions as if this were an access to the
9752 base variable. This unnecessarily allocates a pseudo; see how we can
9753 reuse it if partition base vars have it set already. */
9754 if (!currently_expanding_to_rtl)
9756 tree var = SSA_NAME_VAR (exp);
9757 if (var && DECL_RTL_SET_P (var))
9758 return DECL_RTL (var);
9759 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9760 LAST_VIRTUAL_REGISTER + 1);
9763 g = get_gimple_for_ssa_name (exp);
9764 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9765 if (g == NULL
9766 && modifier == EXPAND_INITIALIZER
9767 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9768 && (optimize || !SSA_NAME_VAR (exp)
9769 || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9770 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9771 g = SSA_NAME_DEF_STMT (exp);
9772 if (g)
9774 rtx r;
9775 location_t saved_loc = curr_insn_location ();
9776 location_t loc = gimple_location (g);
9777 if (loc != UNKNOWN_LOCATION)
9778 set_curr_insn_location (loc);
9779 ops.code = gimple_assign_rhs_code (g);
9780 switch (get_gimple_rhs_class (ops.code))
9782 case GIMPLE_TERNARY_RHS:
9783 ops.op2 = gimple_assign_rhs3 (g);
9784 /* Fallthru */
9785 case GIMPLE_BINARY_RHS:
9786 ops.op1 = gimple_assign_rhs2 (g);
9788 /* Try to expand a conditional compare. */
9789 if (targetm.gen_ccmp_first)
9791 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9792 r = expand_ccmp_expr (g, mode);
9793 if (r)
9794 break;
9796 /* Fallthru */
9797 case GIMPLE_UNARY_RHS:
9798 ops.op0 = gimple_assign_rhs1 (g);
9799 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9800 ops.location = loc;
9801 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9802 break;
9803 case GIMPLE_SINGLE_RHS:
9805 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9806 tmode, modifier, alt_rtl,
9807 inner_reference_p);
9808 break;
9810 default:
9811 gcc_unreachable ();
9813 set_curr_insn_location (saved_loc);
9814 if (REG_P (r) && !REG_EXPR (r))
9815 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9816 return r;
9819 ssa_name = exp;
9820 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9821 exp = SSA_NAME_VAR (ssa_name);
9822 goto expand_decl_rtl;
9824 case PARM_DECL:
9825 case VAR_DECL:
9826 /* If a static var's type was incomplete when the decl was written,
9827 but the type is complete now, lay out the decl now. */
9828 if (DECL_SIZE (exp) == 0
9829 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9830 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9831 layout_decl (exp, 0);
9833 /* fall through */
9835 case FUNCTION_DECL:
9836 case RESULT_DECL:
9837 decl_rtl = DECL_RTL (exp);
9838 expand_decl_rtl:
9839 gcc_assert (decl_rtl);
9841 /* DECL_MODE might change when TYPE_MODE depends on attribute target
9842 settings for VECTOR_TYPE_P that might switch for the function. */
9843 if (currently_expanding_to_rtl
9844 && code == VAR_DECL && MEM_P (decl_rtl)
9845 && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
9846 decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
9847 else
9848 decl_rtl = copy_rtx (decl_rtl);
9850 /* Record writes to register variables. */
9851 if (modifier == EXPAND_WRITE
9852 && REG_P (decl_rtl)
9853 && HARD_REGISTER_P (decl_rtl))
9854 add_to_hard_reg_set (&crtl->asm_clobbers,
9855 GET_MODE (decl_rtl), REGNO (decl_rtl));
9857 /* Ensure the variable is marked as used even if it doesn't go through
9858 a parser. If it hasn't been used yet, write out an external
9859 definition. */
9860 if (exp)
9861 TREE_USED (exp) = 1;
9863 /* Show we haven't gotten RTL for this yet. */
9864 temp = 0;
9866 /* Variables inherited from containing functions should have
9867 been lowered by this point. */
9868 if (exp)
9869 context = decl_function_context (exp);
9870 gcc_assert (!exp
9871 || SCOPE_FILE_SCOPE_P (context)
9872 || context == current_function_decl
9873 || TREE_STATIC (exp)
9874 || DECL_EXTERNAL (exp)
9875 /* ??? C++ creates functions that are not TREE_STATIC. */
9876 || TREE_CODE (exp) == FUNCTION_DECL);
9878 /* This is the case of an array whose size is to be determined
9879 from its initializer, while the initializer is still being parsed.
9880 ??? We aren't parsing while expanding anymore. */
9882 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9883 temp = validize_mem (decl_rtl);
9885 /* If DECL_RTL is memory, we are in the normal case and the
9886 address is not valid, get the address into a register. */
9888 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9890 if (alt_rtl)
9891 *alt_rtl = decl_rtl;
9892 decl_rtl = use_anchored_address (decl_rtl);
9893 if (modifier != EXPAND_CONST_ADDRESS
9894 && modifier != EXPAND_SUM
9895 && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
9896 : GET_MODE (decl_rtl),
9897 XEXP (decl_rtl, 0),
9898 MEM_ADDR_SPACE (decl_rtl)))
9899 temp = replace_equiv_address (decl_rtl,
9900 copy_rtx (XEXP (decl_rtl, 0)));
9903 /* If we got something, return it. But first, set the alignment
9904 if the address is a register. */
9905 if (temp != 0)
9907 if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
9908 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9910 return temp;
9913 if (exp)
9914 dmode = DECL_MODE (exp);
9915 else
9916 dmode = TYPE_MODE (TREE_TYPE (ssa_name));
9918 /* If the mode of DECL_RTL does not match that of the decl,
9919 there are two cases: we are dealing with a BLKmode value
9920 that is returned in a register, or we are dealing with
9921 a promoted value. In the latter case, return a SUBREG
9922 of the wanted mode, but mark it so that we know that it
9923 was already extended. */
9924 if (REG_P (decl_rtl)
9925 && dmode != BLKmode
9926 && GET_MODE (decl_rtl) != dmode)
9928 machine_mode pmode;
9930 /* Get the signedness to be used for this variable. Ensure we get
9931 the same mode we got when the variable was declared. */
9932 if (code != SSA_NAME)
9933 pmode = promote_decl_mode (exp, &unsignedp);
9934 else if ((g = SSA_NAME_DEF_STMT (ssa_name))
9935 && gimple_code (g) == GIMPLE_CALL
9936 && !gimple_call_internal_p (g))
9937 pmode = promote_function_mode (type, mode, &unsignedp,
9938 gimple_call_fntype (g),
9940 else
9941 pmode = promote_ssa_mode (ssa_name, &unsignedp);
9942 gcc_assert (GET_MODE (decl_rtl) == pmode);
9944 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9945 SUBREG_PROMOTED_VAR_P (temp) = 1;
9946 SUBREG_PROMOTED_SET (temp, unsignedp);
9947 return temp;
9950 return decl_rtl;
9952 case INTEGER_CST:
9954 /* Given that TYPE_PRECISION (type) is not always equal to
9955 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9956 the former to the latter according to the signedness of the
9957 type. */
9958 scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
9959 temp = immed_wide_int_const
9960 (wi::to_wide (exp, GET_MODE_PRECISION (mode)), mode);
9961 return temp;
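/* Illustrative sketch (not part of expr.c): extending a constant from
   TYPE_PRECISION to the precision of the mode according to the type's
   signedness, which is what the wi::to_wide call above does.  Shown for a
   3-bit value widened to 32 bits; names are invented for illustration.  */
#include <stdint.h>

static uint32_t
sketch_extend_const (uint32_t low_bits, int type_is_signed)
{
  const unsigned int prec = 3;                   /* TYPE_PRECISION (type)  */
  uint32_t value = low_bits & ((1u << prec) - 1);
  if (type_is_signed && (value & (1u << (prec - 1))))
    value |= ~((1u << prec) - 1);                /* sign-extend  */
  return value;                                  /* otherwise zero-extend  */
}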
9964 case VECTOR_CST:
9966 tree tmp = NULL_TREE;
9967 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9968 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9969 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9970 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9971 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9972 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9973 return const_vector_from_tree (exp);
9974 scalar_int_mode int_mode;
9975 if (is_int_mode (mode, &int_mode))
9977 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
9978 return const_scalar_mask_from_tree (exp);
9979 else
9981 tree type_for_mode
9982 = lang_hooks.types.type_for_mode (int_mode, 1);
9983 if (type_for_mode)
9984 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
9985 type_for_mode, exp);
9988 if (!tmp)
9990 vec<constructor_elt, va_gc> *v;
9991 unsigned i;
9992 vec_alloc (v, VECTOR_CST_NELTS (exp));
9993 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9994 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9995 tmp = build_constructor (type, v);
9997 return expand_expr (tmp, ignore ? const0_rtx : target,
9998 tmode, modifier);
10001 case CONST_DECL:
10002 if (modifier == EXPAND_WRITE)
10004 /* Writing into CONST_DECL is always invalid, but handle it
10005 gracefully. */
10006 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
10007 machine_mode address_mode = targetm.addr_space.address_mode (as);
10008 op0 = expand_expr_addr_expr_1 (exp, NULL_RTX, address_mode,
10009 EXPAND_NORMAL, as);
10010 op0 = memory_address_addr_space (mode, op0, as);
10011 temp = gen_rtx_MEM (mode, op0);
10012 set_mem_addr_space (temp, as);
10013 return temp;
10015 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
10017 case REAL_CST:
10018 /* If optimized, generate immediate CONST_DOUBLE
10019 which will be turned into memory by reload if necessary.
10021 We used to force a register so that loop.c could see it. But
10022 this does not allow gen_* patterns to perform optimizations with
10023 the constants. It also produces two insns in cases like "x = 1.0;".
10024 On most machines, floating-point constants are not permitted in
10025 many insns, so we'd end up copying it to a register in any case.
10027 Now, we do the copying in expand_binop, if appropriate. */
10028 return const_double_from_real_value (TREE_REAL_CST (exp),
10029 TYPE_MODE (TREE_TYPE (exp)));
10031 case FIXED_CST:
10032 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
10033 TYPE_MODE (TREE_TYPE (exp)));
10035 case COMPLEX_CST:
10036 /* Handle evaluating a complex constant in a CONCAT target. */
10037 if (original_target && GET_CODE (original_target) == CONCAT)
10039 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
10040 rtx rtarg, itarg;
10042 rtarg = XEXP (original_target, 0);
10043 itarg = XEXP (original_target, 1);
10045 /* Move the real and imaginary parts separately. */
10046 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
10047 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
10049 if (op0 != rtarg)
10050 emit_move_insn (rtarg, op0);
10051 if (op1 != itarg)
10052 emit_move_insn (itarg, op1);
10054 return original_target;
10057 /* fall through */
10059 case STRING_CST:
10060 temp = expand_expr_constant (exp, 1, modifier);
10062 /* temp contains a constant address.
10063 On RISC machines where a constant address isn't valid,
10064 make some insns to get that address into a register. */
10065 if (modifier != EXPAND_CONST_ADDRESS
10066 && modifier != EXPAND_INITIALIZER
10067 && modifier != EXPAND_SUM
10068 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
10069 MEM_ADDR_SPACE (temp)))
10070 return replace_equiv_address (temp,
10071 copy_rtx (XEXP (temp, 0)));
10072 return temp;
10074 case SAVE_EXPR:
10076 tree val = treeop0;
10077 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
10078 inner_reference_p);
10080 if (!SAVE_EXPR_RESOLVED_P (exp))
10082 /* We can indeed still hit this case, typically via builtin
10083 expanders calling save_expr immediately before expanding
10084 something. Assume this means that we only have to deal
10085 with non-BLKmode values. */
10086 gcc_assert (GET_MODE (ret) != BLKmode);
10088 val = build_decl (curr_insn_location (),
10089 VAR_DECL, NULL, TREE_TYPE (exp));
10090 DECL_ARTIFICIAL (val) = 1;
10091 DECL_IGNORED_P (val) = 1;
10092 treeop0 = val;
10093 TREE_OPERAND (exp, 0) = treeop0;
10094 SAVE_EXPR_RESOLVED_P (exp) = 1;
10096 if (!CONSTANT_P (ret))
10097 ret = copy_to_reg (ret);
10098 SET_DECL_RTL (val, ret);
10101 return ret;
10105 case CONSTRUCTOR:
10106 /* If we don't need the result, just ensure we evaluate any
10107 subexpressions. */
10108 if (ignore)
10110 unsigned HOST_WIDE_INT idx;
10111 tree value;
10113 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
10114 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
10116 return const0_rtx;
10119 return expand_constructor (exp, target, modifier, false);
10121 case TARGET_MEM_REF:
10123 addr_space_t as
10124 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10125 enum insn_code icode;
10126 unsigned int align;
10128 op0 = addr_for_mem_ref (exp, as, true);
10129 op0 = memory_address_addr_space (mode, op0, as);
10130 temp = gen_rtx_MEM (mode, op0);
10131 set_mem_attributes (temp, exp, 0);
10132 set_mem_addr_space (temp, as);
10133 align = get_object_alignment (exp);
10134 if (modifier != EXPAND_WRITE
10135 && modifier != EXPAND_MEMORY
10136 && mode != BLKmode
10137 && align < GET_MODE_ALIGNMENT (mode)
10138 /* If the target does not have special handling for unaligned
10139 loads of this mode then it can use regular moves for them. */
10140 && ((icode = optab_handler (movmisalign_optab, mode))
10141 != CODE_FOR_nothing))
10143 struct expand_operand ops[2];
10145 /* We've already validated the memory, and we're creating a
10146 new pseudo destination. The predicates really can't fail,
10147 nor can the generator. */
10148 create_output_operand (&ops[0], NULL_RTX, mode);
10149 create_fixed_operand (&ops[1], temp);
10150 expand_insn (icode, 2, ops);
10151 temp = ops[0].value;
10153 return temp;
10156 case MEM_REF:
10158 const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
10159 addr_space_t as
10160 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10161 machine_mode address_mode;
10162 tree base = TREE_OPERAND (exp, 0);
10163 gimple *def_stmt;
10164 enum insn_code icode;
10165 unsigned align;
10166 /* Handle expansion of non-aliased memory with non-BLKmode. That
10167 might end up in a register. */
10168 if (mem_ref_refers_to_non_mem_p (exp))
10170 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
10171 base = TREE_OPERAND (base, 0);
10172 if (offset == 0
10173 && !reverse
10174 && tree_fits_uhwi_p (TYPE_SIZE (type))
10175 && (GET_MODE_BITSIZE (DECL_MODE (base))
10176 == tree_to_uhwi (TYPE_SIZE (type))))
10177 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
10178 target, tmode, modifier);
10179 if (TYPE_MODE (type) == BLKmode)
10181 temp = assign_stack_temp (DECL_MODE (base),
10182 GET_MODE_SIZE (DECL_MODE (base)));
10183 store_expr (base, temp, 0, false, false);
10184 temp = adjust_address (temp, BLKmode, offset);
10185 set_mem_size (temp, int_size_in_bytes (type));
10186 return temp;
10188 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
10189 bitsize_int (offset * BITS_PER_UNIT));
10190 REF_REVERSE_STORAGE_ORDER (exp) = reverse;
10191 return expand_expr (exp, target, tmode, modifier);
10193 address_mode = targetm.addr_space.address_mode (as);
10194 base = TREE_OPERAND (exp, 0);
10195 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
10197 tree mask = gimple_assign_rhs2 (def_stmt);
10198 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
10199 gimple_assign_rhs1 (def_stmt), mask);
10200 TREE_OPERAND (exp, 0) = base;
10202 align = get_object_alignment (exp);
10203 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
10204 op0 = memory_address_addr_space (mode, op0, as);
10205 if (!integer_zerop (TREE_OPERAND (exp, 1)))
10207 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
10208 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
10209 op0 = memory_address_addr_space (mode, op0, as);
10211 temp = gen_rtx_MEM (mode, op0);
10212 set_mem_attributes (temp, exp, 0);
10213 set_mem_addr_space (temp, as);
10214 if (TREE_THIS_VOLATILE (exp))
10215 MEM_VOLATILE_P (temp) = 1;
10216 if (modifier != EXPAND_WRITE
10217 && modifier != EXPAND_MEMORY
10218 && !inner_reference_p
10219 && mode != BLKmode
10220 && align < GET_MODE_ALIGNMENT (mode))
10222 if ((icode = optab_handler (movmisalign_optab, mode))
10223 != CODE_FOR_nothing)
10225 struct expand_operand ops[2];
10227 /* We've already validated the memory, and we're creating a
10228 new pseudo destination. The predicates really can't fail,
10229 nor can the generator. */
10230 create_output_operand (&ops[0], NULL_RTX, mode);
10231 create_fixed_operand (&ops[1], temp);
10232 expand_insn (icode, 2, ops);
10233 temp = ops[0].value;
10235 else if (SLOW_UNALIGNED_ACCESS (mode, align))
10236 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
10237 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
10238 (modifier == EXPAND_STACK_PARM
10239 ? NULL_RTX : target),
10240 mode, mode, false, alt_rtl);
10242 if (reverse
10243 && modifier != EXPAND_MEMORY
10244 && modifier != EXPAND_WRITE)
10245 temp = flip_storage_order (mode, temp);
10246 return temp;
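/* Illustrative sketch (not part of expr.c): a portable C analogue of the
   misaligned-load handling above -- when the access is known to be less
   aligned than its mode requires, the value is loaded without assuming
   natural alignment.  Names are invented for illustration.  */
#include <stdint.h>
#include <string.h>

static uint32_t
sketch_unaligned_load (const void *p)
{
  uint32_t v;
  memcpy (&v, p, sizeof v);   /* lets the compiler emit a movmisalign-style
                                 or byte-wise load for the target  */
  return v;
}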
10249 case ARRAY_REF:
10252 tree array = treeop0;
10253 tree index = treeop1;
10254 tree init;
10256 /* Fold an expression like: "foo"[2].
10257 This is not done in fold so it won't happen inside &.
10258 Don't fold if this is for wide characters since it's too
10259 difficult to do correctly and this is a very rare case. */
10261 if (modifier != EXPAND_CONST_ADDRESS
10262 && modifier != EXPAND_INITIALIZER
10263 && modifier != EXPAND_MEMORY)
10265 tree t = fold_read_from_constant_string (exp);
10267 if (t)
10268 return expand_expr (t, target, tmode, modifier);
10271 /* If this is a constant index into a constant array,
10272 just get the value from the array. Handle both the cases when
10273 we have an explicit constructor and when our operand is a variable
10274 that was declared const. */
10276 if (modifier != EXPAND_CONST_ADDRESS
10277 && modifier != EXPAND_INITIALIZER
10278 && modifier != EXPAND_MEMORY
10279 && TREE_CODE (array) == CONSTRUCTOR
10280 && ! TREE_SIDE_EFFECTS (array)
10281 && TREE_CODE (index) == INTEGER_CST)
10283 unsigned HOST_WIDE_INT ix;
10284 tree field, value;
10286 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10287 field, value)
10288 if (tree_int_cst_equal (field, index))
10290 if (!TREE_SIDE_EFFECTS (value))
10291 return expand_expr (fold (value), target, tmode, modifier);
10292 break;
10296 else if (optimize >= 1
10297 && modifier != EXPAND_CONST_ADDRESS
10298 && modifier != EXPAND_INITIALIZER
10299 && modifier != EXPAND_MEMORY
10300 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10301 && TREE_CODE (index) == INTEGER_CST
10302 && (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
10303 && (init = ctor_for_folding (array)) != error_mark_node)
10305 if (init == NULL_TREE)
10307 tree value = build_zero_cst (type);
10308 if (TREE_CODE (value) == CONSTRUCTOR)
10310 /* If VALUE is a CONSTRUCTOR, this optimization is only
10311 useful if this doesn't store the CONSTRUCTOR into
10312 memory. If it does, it is more efficient to just
10313 load the data from the array directly. */
10314 rtx ret = expand_constructor (value, target,
10315 modifier, true);
10316 if (ret == NULL_RTX)
10317 value = NULL_TREE;
10320 if (value)
10321 return expand_expr (value, target, tmode, modifier);
10323 else if (TREE_CODE (init) == CONSTRUCTOR)
10325 unsigned HOST_WIDE_INT ix;
10326 tree field, value;
10328 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10329 field, value)
10330 if (tree_int_cst_equal (field, index))
10332 if (TREE_SIDE_EFFECTS (value))
10333 break;
10335 if (TREE_CODE (value) == CONSTRUCTOR)
10337 /* If VALUE is a CONSTRUCTOR, this
10338 optimization is only useful if
10339 this doesn't store the CONSTRUCTOR
10340 into memory. If it does, it is more
10341 efficient to just load the data from
10342 the array directly. */
10343 rtx ret = expand_constructor (value, target,
10344 modifier, true);
10345 if (ret == NULL_RTX)
10346 break;
10349 return
10350 expand_expr (fold (value), target, tmode, modifier);
10353 else if (TREE_CODE (init) == STRING_CST)
10355 tree low_bound = array_ref_low_bound (exp);
10356 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10358 /* Optimize the special case of a zero lower bound.
10360 We convert the lower bound to sizetype to avoid problems
10361 with constant folding. E.g. suppose the lower bound is
10362 1 and its mode is QI. Without the conversion
10363 (ARRAY + (INDEX - (unsigned char)1))
10364 becomes
10365 (ARRAY + (-(unsigned char)1) + INDEX)
10366 which becomes
10367 (ARRAY + 255 + INDEX). Oops! */
10368 if (!integer_zerop (low_bound))
10369 index1 = size_diffop_loc (loc, index1,
10370 fold_convert_loc (loc, sizetype,
10371 low_bound));
10373 if (tree_fits_uhwi_p (index1)
10374 && compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10376 tree type = TREE_TYPE (TREE_TYPE (init));
10377 scalar_int_mode mode;
10379 if (is_int_mode (TYPE_MODE (type), &mode)
10380 && GET_MODE_SIZE (mode) == 1)
10381 return gen_int_mode (TREE_STRING_POINTER (init)
10382 [TREE_INT_CST_LOW (index1)],
10383 mode);
10388 goto normal_inner_ref;
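/* Illustrative sketch (not part of expr.c): the kind of fold the
   constant-array and string-constant paths above perform -- a constant
   index into a read-only constant initializer is replaced by the element
   value, so no memory load is emitted.  Names are invented for
   illustration.  */
static int
sketch_const_array_ref (void)
{
  static const char greeting[] = "foo";
  return greeting[2];   /* expanded as the character constant 'o'  */
}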
10390 case COMPONENT_REF:
10391 /* If the operand is a CONSTRUCTOR, we can just extract the
10392 appropriate field if it is present. */
10393 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10395 unsigned HOST_WIDE_INT idx;
10396 tree field, value;
10397 scalar_int_mode field_mode;
10399 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10400 idx, field, value)
10401 if (field == treeop1
10402 /* We can normally use the value of the field in the
10403 CONSTRUCTOR. However, if this is a bitfield in
10404 an integral mode that we can fit in a HOST_WIDE_INT,
10405 we must mask only the number of bits in the bitfield,
10406 since this is done implicitly by the constructor. If
10407 the bitfield does not meet either of those conditions,
10408 we can't do this optimization. */
10409 && (! DECL_BIT_FIELD (field)
10410 || (is_int_mode (DECL_MODE (field), &field_mode)
10411 && (GET_MODE_PRECISION (field_mode)
10412 <= HOST_BITS_PER_WIDE_INT))))
10414 if (DECL_BIT_FIELD (field)
10415 && modifier == EXPAND_STACK_PARM)
10416 target = 0;
10417 op0 = expand_expr (value, target, tmode, modifier);
10418 if (DECL_BIT_FIELD (field))
10420 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10421 scalar_int_mode imode
10422 = SCALAR_INT_TYPE_MODE (TREE_TYPE (field));
10424 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10426 op1 = gen_int_mode ((HOST_WIDE_INT_1 << bitsize) - 1,
10427 imode);
10428 op0 = expand_and (imode, op0, op1, target);
10430 else
10432 int count = GET_MODE_PRECISION (imode) - bitsize;
10434 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10435 target, 0);
10436 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10437 target, 0);
10441 return op0;
10444 goto normal_inner_ref;
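/* Illustrative sketch (not part of expr.c): the two adjustments applied
   above when a bit-field value is taken from a CONSTRUCTOR element --
   masking for an unsigned field, and a left shift followed by an
   arithmetic right shift for a signed one.  Shown for a 5-bit field kept
   in a 32-bit integer, assuming the usual arithmetic behaviour of >> on
   negative values; names are invented for illustration.  */
#include <stdint.h>

static uint32_t
sketch_unsigned_field (uint32_t value)
{
  return value & ((1u << 5) - 1);       /* expand_and with the mask  */
}

static int32_t
sketch_signed_field (int32_t value)
{
  int count = 32 - 5;           /* GET_MODE_PRECISION (imode) - bitsize  */
  return (int32_t) ((uint32_t) value << count) >> count;   /* shift pair  */
}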
10446 case BIT_FIELD_REF:
10447 case ARRAY_RANGE_REF:
10448 normal_inner_ref:
10450 machine_mode mode1, mode2;
10451 HOST_WIDE_INT bitsize, bitpos;
10452 tree offset;
10453 int reversep, volatilep = 0, must_force_mem;
10454 tree tem
10455 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10456 &unsignedp, &reversep, &volatilep);
10457 rtx orig_op0, memloc;
10458 bool clear_mem_expr = false;
10460 /* If we got back the original object, something is wrong. Perhaps
10461 we are evaluating an expression too early. In any event, don't
10462 infinitely recurse. */
10463 gcc_assert (tem != exp);
10465 /* If TEM's type is a union of variable size, pass TARGET to the inner
10466 computation, since it will need a temporary and TARGET is known
10467 to be suitable. This occurs in unchecked conversion in Ada. */
10468 orig_op0 = op0
10469 = expand_expr_real (tem,
10470 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10471 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10472 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10473 != INTEGER_CST)
10474 && modifier != EXPAND_STACK_PARM
10475 ? target : NULL_RTX),
10476 VOIDmode,
10477 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10478 NULL, true);
10480 /* If the field has a mode, we want to access it in the
10481 field's mode, not the computed mode.
10482 If a MEM has VOIDmode (external with incomplete type),
10483 use BLKmode for it instead. */
10484 if (MEM_P (op0))
10486 if (mode1 != VOIDmode)
10487 op0 = adjust_address (op0, mode1, 0);
10488 else if (GET_MODE (op0) == VOIDmode)
10489 op0 = adjust_address (op0, BLKmode, 0);
10492 mode2
10493 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10495 /* If we have either an offset, a BLKmode result, or a reference
10496 outside the underlying object, we must force it to memory.
10497 Such a case can occur in Ada if we have unchecked conversion
10498 of an expression from a scalar type to an aggregate type or
10499 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10500 passed a partially uninitialized object or a view-conversion
10501 to a larger size. */
10502 must_force_mem = (offset
10503 || mode1 == BLKmode
10504 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10506 /* Handle CONCAT first. */
10507 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10509 if (bitpos == 0
10510 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0))
10511 && COMPLEX_MODE_P (mode1)
10512 && COMPLEX_MODE_P (GET_MODE (op0))
10513 && (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
10514 == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
10516 if (reversep)
10517 op0 = flip_storage_order (GET_MODE (op0), op0);
10518 if (mode1 != GET_MODE (op0))
10520 rtx parts[2];
10521 for (int i = 0; i < 2; i++)
10523 rtx op = read_complex_part (op0, i != 0);
10524 if (GET_CODE (op) == SUBREG)
10525 op = force_reg (GET_MODE (op), op);
10526 rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
10527 op);
10528 if (temp)
10529 op = temp;
10530 else
10532 if (!REG_P (op) && !MEM_P (op))
10533 op = force_reg (GET_MODE (op), op);
10534 op = gen_lowpart (GET_MODE_INNER (mode1), op);
10536 parts[i] = op;
10538 op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
10540 return op0;
10542 if (bitpos == 0
10543 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10544 && bitsize)
10546 op0 = XEXP (op0, 0);
10547 mode2 = GET_MODE (op0);
10549 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10550 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10551 && bitpos
10552 && bitsize)
10554 op0 = XEXP (op0, 1);
10555 bitpos = 0;
10556 mode2 = GET_MODE (op0);
10558 else
10559 /* Otherwise force into memory. */
10560 must_force_mem = 1;
10563 /* If this is a constant, put it in a register if it is a legitimate
10564 constant and we don't need a memory reference. */
10565 if (CONSTANT_P (op0)
10566 && mode2 != BLKmode
10567 && targetm.legitimate_constant_p (mode2, op0)
10568 && !must_force_mem)
10569 op0 = force_reg (mode2, op0);
10571 /* Otherwise, if this is a constant, try to force it to the constant
10572 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10573 is a legitimate constant. */
10574 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10575 op0 = validize_mem (memloc);
10577 /* Otherwise, if this is a constant or the object is not in memory
10578 and needs to be, put it there. */
10579 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10581 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10582 emit_move_insn (memloc, op0);
10583 op0 = memloc;
10584 clear_mem_expr = true;
10587 if (offset)
10589 machine_mode address_mode;
10590 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10591 EXPAND_SUM);
10593 gcc_assert (MEM_P (op0));
10595 address_mode = get_address_mode (op0);
10596 if (GET_MODE (offset_rtx) != address_mode)
10598 /* We cannot be sure that the RTL in offset_rtx is valid outside
10599 of a memory address context, so force it into a register
10600 before attempting to convert it to the desired mode. */
10601 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10602 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10605 /* See the comment in expand_assignment for the rationale. */
10606 if (mode1 != VOIDmode
10607 && bitpos != 0
10608 && bitsize > 0
10609 && (bitpos % bitsize) == 0
10610 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10611 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10613 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10614 bitpos = 0;
10617 op0 = offset_address (op0, offset_rtx,
10618 highest_pow2_factor (offset));
10621 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10622 record its alignment as BIGGEST_ALIGNMENT. */
10623 if (MEM_P (op0) && bitpos == 0 && offset != 0
10624 && is_aligning_offset (offset, tem))
10625 set_mem_align (op0, BIGGEST_ALIGNMENT);
10627 /* Don't forget about volatility even if this is a bitfield. */
10628 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10630 if (op0 == orig_op0)
10631 op0 = copy_rtx (op0);
10633 MEM_VOLATILE_P (op0) = 1;
10636 /* In cases where an aligned union has an unaligned object
10637 as a field, we might be extracting a BLKmode value from
10638 an integer-mode (e.g., SImode) object. Handle this case
10639 by doing the extract into an object as wide as the field
10640 (which we know to be the width of a basic mode), then
10641 storing into memory, and changing the mode to BLKmode. */
10642 if (mode1 == VOIDmode
10643 || REG_P (op0) || GET_CODE (op0) == SUBREG
10644 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10645 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10646 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10647 && modifier != EXPAND_CONST_ADDRESS
10648 && modifier != EXPAND_INITIALIZER
10649 && modifier != EXPAND_MEMORY)
10650 /* If the bitfield is volatile and the bitsize
10651 is narrower than the access size of the bitfield,
10652 we need to extract bitfields from the access. */
10653 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10654 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10655 && mode1 != BLKmode
10656 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10657 /* If the field isn't aligned enough to fetch as a memref,
10658 fetch it as a bit field. */
10659 || (mode1 != BLKmode
10660 && (((MEM_P (op0)
10661 ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10662 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0)
10663 : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10664 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
10665 && modifier != EXPAND_MEMORY
10666 && ((modifier == EXPAND_CONST_ADDRESS
10667 || modifier == EXPAND_INITIALIZER)
10668 ? STRICT_ALIGNMENT
10669 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10670 || (bitpos % BITS_PER_UNIT != 0)))
10671 /* If the type and the field are a constant size and the
10672 size of the type isn't the same size as the bitfield,
10673 we must use bitfield operations. */
10674 || (bitsize >= 0
10675 && TYPE_SIZE (TREE_TYPE (exp))
10676 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10677 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10678 bitsize)))
10680 machine_mode ext_mode = mode;
10682 if (ext_mode == BLKmode
10683 && ! (target != 0 && MEM_P (op0)
10684 && MEM_P (target)
10685 && bitpos % BITS_PER_UNIT == 0))
10686 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10688 if (ext_mode == BLKmode)
10690 if (target == 0)
10691 target = assign_temp (type, 1, 1);
10693 /* ??? Unlike the similar test a few lines below, this one is
10694 very likely obsolete. */
10695 if (bitsize == 0)
10696 return target;
10698 /* In this case, BITPOS must start at a byte boundary and
10699 TARGET, if specified, must be a MEM. */
10700 gcc_assert (MEM_P (op0)
10701 && (!target || MEM_P (target))
10702 && !(bitpos % BITS_PER_UNIT));
10704 emit_block_move (target,
10705 adjust_address (op0, VOIDmode,
10706 bitpos / BITS_PER_UNIT),
10707 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10708 / BITS_PER_UNIT),
10709 (modifier == EXPAND_STACK_PARM
10710 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10712 return target;
10715 /* If we have nothing to extract, the result will be 0 for targets
10716 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10717 return 0 for the sake of consistency, as reading a zero-sized
10718 bitfield is valid in Ada and the value is fully specified. */
10719 if (bitsize == 0)
10720 return const0_rtx;
10722 op0 = validize_mem (op0);
10724 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10725 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10727 /* If the result has a record type and the extraction is done in
10728 an integral mode, then the field may be not aligned on a byte
10729 boundary; in this case, if it has reverse storage order, it
10730 needs to be extracted as a scalar field with reverse storage
10731 order and put back into memory order afterwards. */
10732 if (TREE_CODE (type) == RECORD_TYPE
10733 && GET_MODE_CLASS (ext_mode) == MODE_INT)
10734 reversep = TYPE_REVERSE_STORAGE_ORDER (type);
10736 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10737 (modifier == EXPAND_STACK_PARM
10738 ? NULL_RTX : target),
10739 ext_mode, ext_mode, reversep, alt_rtl);
10741 /* If the result has a record type and the mode of OP0 is an
10742 integral mode then, if BITSIZE is narrower than this mode
10743 and this is for big-endian data, we must put the field
10744 into the high-order bits. And we must also put it back
10745 into memory order if it has been previously reversed. */
10746 scalar_int_mode op0_mode;
10747 if (TREE_CODE (type) == RECORD_TYPE
10748 && is_int_mode (GET_MODE (op0), &op0_mode))
10750 HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode);
10752 if (bitsize < size
10753 && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
10754 op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0,
10755 size - bitsize, op0, 1);
10757 if (reversep)
10758 op0 = flip_storage_order (op0_mode, op0);
10761 /* If the result type is BLKmode, store the data into a temporary
10762 of the appropriate type, but with the mode corresponding to the
10763 mode for the data we have (op0's mode). */
10764 if (mode == BLKmode)
10766 rtx new_rtx
10767 = assign_stack_temp_for_type (ext_mode,
10768 GET_MODE_BITSIZE (ext_mode),
10769 type);
10770 emit_move_insn (new_rtx, op0);
10771 op0 = copy_rtx (new_rtx);
10772 PUT_MODE (op0, BLKmode);
10775 return op0;
10778 /* If the result is BLKmode, use that to access the object
10779 now as well. */
10780 if (mode == BLKmode)
10781 mode1 = BLKmode;
10783 /* Get a reference to just this component. */
10784 if (modifier == EXPAND_CONST_ADDRESS
10785 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10786 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10787 else
10788 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10790 if (op0 == orig_op0)
10791 op0 = copy_rtx (op0);
10793 /* Don't set memory attributes if the base expression is
10794 SSA_NAME that got expanded as a MEM. In that case, we should
10795 just honor its original memory attributes. */
10796 if (TREE_CODE (tem) != SSA_NAME || !MEM_P (orig_op0))
10797 set_mem_attributes (op0, exp, 0);
10799 if (REG_P (XEXP (op0, 0)))
10800 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10802 /* If op0 is a temporary because the original expression was forced
10803 to memory, clear MEM_EXPR so that the original expression cannot
10804 be marked as addressable through MEM_EXPR of the temporary. */
10805 if (clear_mem_expr)
10806 set_mem_expr (op0, NULL_TREE);
10808 MEM_VOLATILE_P (op0) |= volatilep;
10810 if (reversep
10811 && modifier != EXPAND_MEMORY
10812 && modifier != EXPAND_WRITE)
10813 op0 = flip_storage_order (mode1, op0);
10815 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10816 || modifier == EXPAND_CONST_ADDRESS
10817 || modifier == EXPAND_INITIALIZER)
10818 return op0;
10820 if (target == 0)
10821 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10823 convert_move (target, op0, unsignedp);
10824 return target;
10827 case OBJ_TYPE_REF:
10828 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10830 case CALL_EXPR:
10831 /* All valid uses of __builtin_va_arg_pack () are removed during
10832 inlining. */
10833 if (CALL_EXPR_VA_ARG_PACK (exp))
10834 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10836 tree fndecl = get_callee_fndecl (exp), attr;
10838 if (fndecl
10839 && (attr = lookup_attribute ("error",
10840 DECL_ATTRIBUTES (fndecl))) != NULL)
10841 error ("%Kcall to %qs declared with attribute error: %s",
10842 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10843 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10844 if (fndecl
10845 && (attr = lookup_attribute ("warning",
10846 DECL_ATTRIBUTES (fndecl))) != NULL)
10847 warning_at (tree_nonartificial_location (exp),
10848 0, "%Kcall to %qs declared with attribute warning: %s",
10849 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10850 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10852 /* Check for a built-in function. */
10853 if (fndecl && DECL_BUILT_IN (fndecl))
10855 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10856 if (CALL_WITH_BOUNDS_P (exp))
10857 return expand_builtin_with_bounds (exp, target, subtarget,
10858 tmode, ignore);
10859 else
10860 return expand_builtin (exp, target, subtarget, tmode, ignore);
10863 return expand_call (exp, target, ignore);
10865 case VIEW_CONVERT_EXPR:
10866 op0 = NULL_RTX;
10868 /* If we are converting to BLKmode, try to avoid an intermediate
10869 temporary by fetching an inner memory reference. */
10870 if (mode == BLKmode
10871 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10872 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10873 && handled_component_p (treeop0))
10875 machine_mode mode1;
10876 HOST_WIDE_INT bitsize, bitpos;
10877 tree offset;
10878 int unsignedp, reversep, volatilep = 0;
10879 tree tem
10880 = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
10881 &unsignedp, &reversep, &volatilep);
10882 rtx orig_op0;
10884 /* ??? We should work harder and deal with non-zero offsets. */
10885 if (!offset
10886 && (bitpos % BITS_PER_UNIT) == 0
10887 && !reversep
10888 && bitsize >= 0
10889 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10891 /* See the normal_inner_ref case for the rationale. */
10892 orig_op0
10893 = expand_expr_real (tem,
10894 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10895 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10896 != INTEGER_CST)
10897 && modifier != EXPAND_STACK_PARM
10898 ? target : NULL_RTX),
10899 VOIDmode,
10900 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10901 NULL, true);
10903 if (MEM_P (orig_op0))
10905 op0 = orig_op0;
10907 /* Get a reference to just this component. */
10908 if (modifier == EXPAND_CONST_ADDRESS
10909 || modifier == EXPAND_SUM
10910 || modifier == EXPAND_INITIALIZER)
10911 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10912 else
10913 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10915 if (op0 == orig_op0)
10916 op0 = copy_rtx (op0);
10918 set_mem_attributes (op0, treeop0, 0);
10919 if (REG_P (XEXP (op0, 0)))
10920 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10922 MEM_VOLATILE_P (op0) |= volatilep;
10927 if (!op0)
10928 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10929 NULL, inner_reference_p);
10931 /* If the input and output modes are both the same, we are done. */
10932 if (mode == GET_MODE (op0))
10934 /* If neither mode is BLKmode, and both modes are the same size
10935 then we can use gen_lowpart. */
10936 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10937 && (GET_MODE_PRECISION (mode)
10938 == GET_MODE_PRECISION (GET_MODE (op0)))
10939 && !COMPLEX_MODE_P (GET_MODE (op0)))
10941 if (GET_CODE (op0) == SUBREG)
10942 op0 = force_reg (GET_MODE (op0), op0);
10943 temp = gen_lowpart_common (mode, op0);
10944 if (temp)
10945 op0 = temp;
10946 else
10948 if (!REG_P (op0) && !MEM_P (op0))
10949 op0 = force_reg (GET_MODE (op0), op0);
10950 op0 = gen_lowpart (mode, op0);
10953 /* If both types are integral, convert from one mode to the other. */
10954 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10955 op0 = convert_modes (mode, GET_MODE (op0), op0,
10956 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10957 /* If the output type is a bit-field type, do an extraction. */
10958 else if (reduce_bit_field)
10959 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10960 TYPE_UNSIGNED (type), NULL_RTX,
10961 mode, mode, false, NULL);
10962 /* As a last resort, spill op0 to memory, and reload it in a
10963 different mode. */
10964 else if (!MEM_P (op0))
10966 /* If the operand is not a MEM, force it into memory. Since we
10967 are going to be changing the mode of the MEM, don't call
10968 force_const_mem for constants because we don't allow pool
10969 constants to change mode. */
10970 tree inner_type = TREE_TYPE (treeop0);
10972 gcc_assert (!TREE_ADDRESSABLE (exp));
10974 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10975 target
10976 = assign_stack_temp_for_type
10977 (TYPE_MODE (inner_type),
10978 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10980 emit_move_insn (target, op0);
10981 op0 = target;
10984 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10985 output type is such that the operand is known to be aligned, indicate
10986 that it is. Otherwise, we need only be concerned about alignment for
10987 non-BLKmode results. */
10988 if (MEM_P (op0))
10990 enum insn_code icode;
10992 if (modifier != EXPAND_WRITE
10993 && modifier != EXPAND_MEMORY
10994 && !inner_reference_p
10995 && mode != BLKmode
10996 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10998 /* If the target has special handling for unaligned
10999 loads of this mode, use it. */
11000 if ((icode = optab_handler (movmisalign_optab, mode))
11001 != CODE_FOR_nothing)
11003 rtx reg;
11005 op0 = adjust_address (op0, mode, 0);
11006 /* We've already validated the memory, and we're creating a
11007 new pseudo destination. The predicates really can't
11008 fail. */
11009 reg = gen_reg_rtx (mode);
11011 /* Nor can the insn generator. */
11012 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
11013 emit_insn (insn);
11014 return reg;
11016 else if (STRICT_ALIGNMENT)
11018 tree inner_type = TREE_TYPE (treeop0);
11019 HOST_WIDE_INT temp_size
11020 = MAX (int_size_in_bytes (inner_type),
11021 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
11022 rtx new_rtx
11023 = assign_stack_temp_for_type (mode, temp_size, type);
11024 rtx new_with_op0_mode
11025 = adjust_address (new_rtx, GET_MODE (op0), 0);
11027 gcc_assert (!TREE_ADDRESSABLE (exp));
11029 if (GET_MODE (op0) == BLKmode)
11030 emit_block_move (new_with_op0_mode, op0,
11031 GEN_INT (GET_MODE_SIZE (mode)),
11032 (modifier == EXPAND_STACK_PARM
11033 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
11034 else
11035 emit_move_insn (new_with_op0_mode, op0);
11037 op0 = new_rtx;
11041 op0 = adjust_address (op0, mode, 0);
11044 return op0;
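      /* Editorial illustration (not part of GCC): VIEW_CONVERT_EXPR
	 reinterprets the bits of its operand in another type of the same
	 size, the tree-level analogue of the source idiom

	   float f = 1.0f;
	   unsigned int u;
	   __builtin_memcpy (&u, &f, sizeof u);

	 The code above picks the cheapest way to realize that view:
	 reusing an inner MEM, gen_lowpart for equal-precision scalar
	 modes, convert_modes for integral types, or spilling to a stack
	 temporary when nothing simpler works.  */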
11046 case MODIFY_EXPR:
11048 tree lhs = treeop0;
11049 tree rhs = treeop1;
11050 gcc_assert (ignore);
11052 /* Check for |= or &= of a bitfield of size one into another bitfield
11053 of size one.  In this case, unless we need the result of the
11054 assignment, we can do this more efficiently with a test followed
11055 by an assignment, if necessary.
11057 ??? At this point, we can't get a BIT_FIELD_REF here. But if
11058 things change so we do, this code should be enhanced to
11059 support it. */
11060 if (TREE_CODE (lhs) == COMPONENT_REF
11061 && (TREE_CODE (rhs) == BIT_IOR_EXPR
11062 || TREE_CODE (rhs) == BIT_AND_EXPR)
11063 && TREE_OPERAND (rhs, 0) == lhs
11064 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
11065 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
11066 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
11068 rtx_code_label *label = gen_label_rtx ();
11069 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
11070 do_jump (TREE_OPERAND (rhs, 1),
11071 value ? label : 0,
11072 value ? 0 : label,
11073 profile_probability::uninitialized ());
11074 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
11075 false);
11076 do_pending_stack_adjust ();
11077 emit_label (label);
11078 return const0_rtx;
11081 expand_assignment (lhs, rhs, false);
11082 return const0_rtx;
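      /* Editorial illustration (not part of GCC): the single-bit |= / &=
	 shortcut above turns source such as

	   struct S { unsigned a : 1; unsigned b : 1; } s;
	   s.a |= s.b;

	 into the equivalent of "if (s.b) s.a = 1;", i.e. a test of the
	 source bit followed by a conditional store of a constant, instead
	 of a read-modify-write of the destination bit-field.  */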
11085 case ADDR_EXPR:
11086 return expand_expr_addr_expr (exp, target, tmode, modifier);
11088 case REALPART_EXPR:
11089 op0 = expand_normal (treeop0);
11090 return read_complex_part (op0, false);
11092 case IMAGPART_EXPR:
11093 op0 = expand_normal (treeop0);
11094 return read_complex_part (op0, true);
11096 case RETURN_EXPR:
11097 case LABEL_EXPR:
11098 case GOTO_EXPR:
11099 case SWITCH_EXPR:
11100 case ASM_EXPR:
11101 /* Expanded in cfgexpand.c. */
11102 gcc_unreachable ();
11104 case TRY_CATCH_EXPR:
11105 case CATCH_EXPR:
11106 case EH_FILTER_EXPR:
11107 case TRY_FINALLY_EXPR:
11108 /* Lowered by tree-eh.c. */
11109 gcc_unreachable ();
11111 case WITH_CLEANUP_EXPR:
11112 case CLEANUP_POINT_EXPR:
11113 case TARGET_EXPR:
11114 case CASE_LABEL_EXPR:
11115 case VA_ARG_EXPR:
11116 case BIND_EXPR:
11117 case INIT_EXPR:
11118 case CONJ_EXPR:
11119 case COMPOUND_EXPR:
11120 case PREINCREMENT_EXPR:
11121 case PREDECREMENT_EXPR:
11122 case POSTINCREMENT_EXPR:
11123 case POSTDECREMENT_EXPR:
11124 case LOOP_EXPR:
11125 case EXIT_EXPR:
11126 case COMPOUND_LITERAL_EXPR:
11127 /* Lowered by gimplify.c. */
11128 gcc_unreachable ();
11130 case FDESC_EXPR:
11131 /* Function descriptors are not valid except as
11132 initialization constants, and should not be expanded. */
11133 gcc_unreachable ();
11135 case WITH_SIZE_EXPR:
11136 /* WITH_SIZE_EXPR expands to its first argument. The caller should
11137 have pulled out the size to use in whatever context it needed. */
11138 return expand_expr_real (treeop0, original_target, tmode,
11139 modifier, alt_rtl, inner_reference_p);
11141 default:
11142 return expand_expr_real_2 (&ops, target, tmode, modifier);
11146 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
11147 signedness of TYPE), possibly returning the result in TARGET. */
11148 static rtx
11149 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
11151 HOST_WIDE_INT prec = TYPE_PRECISION (type);
11152 if (target && GET_MODE (target) != GET_MODE (exp))
11153 target = 0;
11154 /* For constant values, reduce using build_int_cst_type. */
11155 if (CONST_INT_P (exp))
11157 HOST_WIDE_INT value = INTVAL (exp);
11158 tree t = build_int_cst_type (type, value);
11159 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
11161 else if (TYPE_UNSIGNED (type))
11163 machine_mode mode = GET_MODE (exp);
11164 rtx mask = immed_wide_int_const
11165 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
11166 return expand_and (mode, exp, mask, target);
11168 else
11170 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
11171 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
11172 exp, count, target, 0);
11173 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
11174 exp, count, target, 0);
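/* Editorial sketch, not part of GCC: the arithmetic performed by
   reduce_to_bit_field_precision above, written out on a plain 64-bit
   container.  The helper name is hypothetical and PREC is assumed to be
   in the range 1..64.  Unsigned fields keep only their low PREC bits
   (the wi::mask path above); signed fields additionally have the field's
   sign bit replicated into the upper bits, mirroring the
   LSHIFT_EXPR/RSHIFT_EXPR pair above.  */

static long long
sketch_reduce_precision (long long val, int prec, int unsignedp)
{
  unsigned long long uval = (unsigned long long) val;
  int count = 64 - prec;			/* bits above the field */

  uval = (uval << count) >> count;		/* keep the low PREC bits */
  if (!unsignedp && prec < 64 && ((uval >> (prec - 1)) & 1))
    uval |= ~0ULL << prec;			/* replicate the sign bit */
  return (long long) uval;
}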
11178 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
11179 when applied to the address of EXP, produces an address known to be
11180 aligned more than BIGGEST_ALIGNMENT. */
11182 static int
11183 is_aligning_offset (const_tree offset, const_tree exp)
11185 /* Strip off any conversions. */
11186 while (CONVERT_EXPR_P (offset))
11187 offset = TREE_OPERAND (offset, 0);
11189 /* We must now have a BIT_AND_EXPR with a constant that is one less than
11190 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
11191 if (TREE_CODE (offset) != BIT_AND_EXPR
11192 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
11193 || compare_tree_int (TREE_OPERAND (offset, 1),
11194 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
11195 || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
11196 return 0;
11198 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
11199 It must be NEGATE_EXPR. Then strip any more conversions. */
11200 offset = TREE_OPERAND (offset, 0);
11201 while (CONVERT_EXPR_P (offset))
11202 offset = TREE_OPERAND (offset, 0);
11204 if (TREE_CODE (offset) != NEGATE_EXPR)
11205 return 0;
11207 offset = TREE_OPERAND (offset, 0);
11208 while (CONVERT_EXPR_P (offset))
11209 offset = TREE_OPERAND (offset, 0);
11211 /* This must now be the address of EXP. */
11212 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
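/* Editorial sketch, not part of GCC: is_aligning_offset above recognizes
   offsets of the shape "(-&EXP) & (ALIGN - 1)", the usual way of rounding
   an address up to the next ALIGN boundary, where ALIGN is a power of two
   larger than BIGGEST_ALIGNMENT.  A plain-C analogue with a hypothetical
   name:  */

static unsigned long
sketch_aligning_offset (const void *p, unsigned long align)
{
  /* ALIGN must be a power of two.  Adding the result to P yields the
     next ALIGN-aligned address at or above P.  */
  return (0UL - (unsigned long) p) & (align - 1);
}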
11215 /* Return the tree node if ARG corresponds to a string constant, or zero
11216 if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
11217 in bytes within the string that ARG is accessing.  The type of the
11218 offset will be `sizetype'. */
11220 tree
11221 string_constant (tree arg, tree *ptr_offset)
11223 tree array, offset, lower_bound;
11224 STRIP_NOPS (arg);
11226 if (TREE_CODE (arg) == ADDR_EXPR)
11228 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
11230 *ptr_offset = size_zero_node;
11231 return TREE_OPERAND (arg, 0);
11233 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
11235 array = TREE_OPERAND (arg, 0);
11236 offset = size_zero_node;
11238 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
11240 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
11241 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11242 if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
11243 return 0;
11245 /* Check if the array has a nonzero lower bound. */
11246 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
11247 if (!integer_zerop (lower_bound))
11249 /* If the offset and base aren't both constants, return 0. */
11250 if (TREE_CODE (lower_bound) != INTEGER_CST)
11251 return 0;
11252 if (TREE_CODE (offset) != INTEGER_CST)
11253 return 0;
11254 /* Adjust offset by the lower bound. */
11255 offset = size_diffop (fold_convert (sizetype, offset),
11256 fold_convert (sizetype, lower_bound));
11259 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
11261 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
11262 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11263 if (TREE_CODE (array) != ADDR_EXPR)
11264 return 0;
11265 array = TREE_OPERAND (array, 0);
11266 if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
11267 return 0;
11269 else
11270 return 0;
11272 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
11274 tree arg0 = TREE_OPERAND (arg, 0);
11275 tree arg1 = TREE_OPERAND (arg, 1);
11277 STRIP_NOPS (arg0);
11278 STRIP_NOPS (arg1);
11280 if (TREE_CODE (arg0) == ADDR_EXPR
11281 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
11282 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
11284 array = TREE_OPERAND (arg0, 0);
11285 offset = arg1;
11287 else if (TREE_CODE (arg1) == ADDR_EXPR
11288 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
11289 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
11291 array = TREE_OPERAND (arg1, 0);
11292 offset = arg0;
11294 else
11295 return 0;
11297 else
11298 return 0;
11300 if (TREE_CODE (array) == STRING_CST)
11302 *ptr_offset = fold_convert (sizetype, offset);
11303 return array;
11305 else if (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
11307 int length;
11308 tree init = ctor_for_folding (array);
11310 /* Variables initialized to string literals can be handled too. */
11311 if (init == error_mark_node
11312 || !init
11313 || TREE_CODE (init) != STRING_CST)
11314 return 0;
11316 /* Avoid const char foo[4] = "abcde"; */
11317 if (DECL_SIZE_UNIT (array) == NULL_TREE
11318 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
11319 || (length = TREE_STRING_LENGTH (init)) <= 0
11320 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
11321 return 0;
11323 /* If the variable is bigger than the string literal, OFFSET must be
11324 constant and inside the bounds of the string literal. */
11325 offset = fold_convert (sizetype, offset);
11326 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
11327 && (! tree_fits_uhwi_p (offset)
11328 || compare_tree_int (offset, length) >= 0))
11329 return 0;
11331 *ptr_offset = offset;
11332 return init;
11335 return 0;
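/* Editorial illustration (not part of GCC): string_constant above decodes
   address expressions such as

     const char msg[] = "hello";
     ... &"abcdef"[2] ...            -- string "abcdef", offset 2
     ... msg + 3 ...                 -- initializer "hello", offset 3

   returning the STRING_CST (or the variable's string initializer) and the
   byte offset into it; anything it cannot prove to be a string access
   yields zero.  */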
11338 /* Generate code to calculate OPS, an exploded expression,
11339 using a store-flag instruction, and return an rtx for the result.
11340 OPS reflects a comparison.
11342 If TARGET is nonzero, store the result there if convenient.
11344 Return zero if there is no suitable set-flag instruction
11345 available on this machine.
11347 Once expand_expr has been called on the arguments of the comparison,
11348 we are committed to doing the store flag, since it is not safe to
11349 re-evaluate the expression. We emit the store-flag insn by calling
11350 emit_store_flag, but only expand the arguments if we have a reason
11351 to believe that emit_store_flag will be successful. If we think that
11352 it will, but it isn't, we have to simulate the store-flag with a
11353 set/jump/set sequence. */
11355 static rtx
11356 do_store_flag (sepops ops, rtx target, machine_mode mode)
11358 enum rtx_code code;
11359 tree arg0, arg1, type;
11360 machine_mode operand_mode;
11361 int unsignedp;
11362 rtx op0, op1;
11363 rtx subtarget = target;
11364 location_t loc = ops->location;
11366 arg0 = ops->op0;
11367 arg1 = ops->op1;
11369 /* Don't crash if the comparison was erroneous. */
11370 if (arg0 == error_mark_node || arg1 == error_mark_node)
11371 return const0_rtx;
11373 type = TREE_TYPE (arg0);
11374 operand_mode = TYPE_MODE (type);
11375 unsignedp = TYPE_UNSIGNED (type);
11377 /* We won't bother with BLKmode store-flag operations because it would mean
11378 passing a lot of information to emit_store_flag. */
11379 if (operand_mode == BLKmode)
11380 return 0;
11382 /* We won't bother with store-flag operations involving function pointers
11383 when function pointers must be canonicalized before comparisons. */
11384 if (targetm.have_canonicalize_funcptr_for_compare ()
11385 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11386 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11387 == FUNCTION_TYPE))
11388 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11389 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11390 == FUNCTION_TYPE))))
11391 return 0;
11393 STRIP_NOPS (arg0);
11394 STRIP_NOPS (arg1);
11396 /* For vector typed comparisons emit code to generate the desired
11397 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11398 expander for this. */
11399 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11401 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11402 if (VECTOR_BOOLEAN_TYPE_P (ops->type)
11403 && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
11404 return expand_vec_cmp_expr (ops->type, ifexp, target);
11405 else
11407 tree if_true = constant_boolean_node (true, ops->type);
11408 tree if_false = constant_boolean_node (false, ops->type);
11409 return expand_vec_cond_expr (ops->type, ifexp, if_true,
11410 if_false, target);
11414 /* Get the rtx comparison code to use. We know that EXP is a comparison
11415 operation of some type. Some comparisons against 1 and -1 can be
11416 converted to comparisons with zero. Do so here so that the tests
11417 below will be aware that we have a comparison with zero. These
11418 tests will not catch constants in the first operand, but constants
11419 are rarely passed as the first operand. */
11421 switch (ops->code)
11423 case EQ_EXPR:
11424 code = EQ;
11425 break;
11426 case NE_EXPR:
11427 code = NE;
11428 break;
11429 case LT_EXPR:
11430 if (integer_onep (arg1))
11431 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11432 else
11433 code = unsignedp ? LTU : LT;
11434 break;
11435 case LE_EXPR:
11436 if (! unsignedp && integer_all_onesp (arg1))
11437 arg1 = integer_zero_node, code = LT;
11438 else
11439 code = unsignedp ? LEU : LE;
11440 break;
11441 case GT_EXPR:
11442 if (! unsignedp && integer_all_onesp (arg1))
11443 arg1 = integer_zero_node, code = GE;
11444 else
11445 code = unsignedp ? GTU : GT;
11446 break;
11447 case GE_EXPR:
11448 if (integer_onep (arg1))
11449 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11450 else
11451 code = unsignedp ? GEU : GE;
11452 break;
11454 case UNORDERED_EXPR:
11455 code = UNORDERED;
11456 break;
11457 case ORDERED_EXPR:
11458 code = ORDERED;
11459 break;
11460 case UNLT_EXPR:
11461 code = UNLT;
11462 break;
11463 case UNLE_EXPR:
11464 code = UNLE;
11465 break;
11466 case UNGT_EXPR:
11467 code = UNGT;
11468 break;
11469 case UNGE_EXPR:
11470 code = UNGE;
11471 break;
11472 case UNEQ_EXPR:
11473 code = UNEQ;
11474 break;
11475 case LTGT_EXPR:
11476 code = LTGT;
11477 break;
11479 default:
11480 gcc_unreachable ();
11483 /* Put a constant second. */
11484 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11485 || TREE_CODE (arg0) == FIXED_CST)
11487 std::swap (arg0, arg1);
11488 code = swap_condition (code);
11491 /* If this is an equality or inequality test of a single bit, we can
11492 do this by shifting the bit being tested to the low-order bit and
11493 masking the result with the constant 1. If the condition was EQ,
11494 we xor it with 1. This does not require an scc insn and is faster
11495 than an scc insn even if we have it.
11497 The code to make this transformation was moved into fold_single_bit_test,
11498 so we just call into the folder and expand its result. */
11500 if ((code == NE || code == EQ)
11501 && integer_zerop (arg1)
11502 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11504 gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11505 if (srcstmt
11506 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11508 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11509 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11510 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11511 gimple_assign_rhs1 (srcstmt),
11512 gimple_assign_rhs2 (srcstmt));
11513 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11514 if (temp)
11515 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11519 if (! get_subtarget (target)
11520 || GET_MODE (subtarget) != operand_mode)
11521 subtarget = 0;
11523 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11525 if (target == 0)
11526 target = gen_reg_rtx (mode);
11528 /* Try a cstore if possible. */
11529 return emit_store_flag_force (target, code, op0, op1,
11530 operand_mode, unsignedp,
11531 (TYPE_PRECISION (ops->type) == 1
11532 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
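/* Editorial sketch, not part of GCC: the single-bit path in do_store_flag
   above corresponds to the source-level rewrite below; testing one bit
   needs only a shift and a mask (plus an XOR for the EQ form), with no
   conditional branch or scc instruction.  The helper name is
   hypothetical.  */

static int
sketch_single_bit_test (unsigned int x, int bitpos, int test_eq)
{
  /* (x & (1u << bitpos)) != 0 becomes a shift and a mask; the EQ form
     additionally XORs the result with 1.  */
  int ne = (x >> bitpos) & 1;
  return test_eq ? ne ^ 1 : ne;
}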
11535 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11536 0 otherwise (i.e. if there is no casesi instruction).
11538 DEFAULT_PROBABILITY is the probability of jumping to the default
11539 label. */
11540 int
11541 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11542 rtx table_label, rtx default_label, rtx fallback_label,
11543 profile_probability default_probability)
11545 struct expand_operand ops[5];
11546 machine_mode index_mode = SImode;
11547 rtx op1, op2, index;
11549 if (! targetm.have_casesi ())
11550 return 0;
11552 /* The index must be some form of integer. Convert it to SImode. */
11553 scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
11554 if (GET_MODE_BITSIZE (omode) > GET_MODE_BITSIZE (index_mode))
11556 rtx rangertx = expand_normal (range);
11558 /* We must handle the endpoints in the original mode. */
11559 index_expr = build2 (MINUS_EXPR, index_type,
11560 index_expr, minval);
11561 minval = integer_zero_node;
11562 index = expand_normal (index_expr);
11563 if (default_label)
11564 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11565 omode, 1, default_label,
11566 default_probability);
11567 /* Now we can safely truncate. */
11568 index = convert_to_mode (index_mode, index, 0);
11570 else
11572 if (omode != index_mode)
11574 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11575 index_expr = fold_convert (index_type, index_expr);
11578 index = expand_normal (index_expr);
11581 do_pending_stack_adjust ();
11583 op1 = expand_normal (minval);
11584 op2 = expand_normal (range);
11586 create_input_operand (&ops[0], index, index_mode);
11587 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11588 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11589 create_fixed_operand (&ops[3], table_label);
11590 create_fixed_operand (&ops[4], (default_label
11591 ? default_label
11592 : fallback_label));
11593 expand_jump_insn (targetm.code_for_casesi, 5, ops);
11594 return 1;
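/* Editorial illustration (not part of GCC): when the switch index is wider
   than SImode (e.g. a 64-bit index feeding a casesi pattern that takes a
   32-bit operand), try_casesi above subtracts MINVAL and does the range
   check in the wide mode first, so that the subsequent truncation to
   SImode cannot discard significant bits; narrower indexes are simply
   converted to SImode and the casesi pattern performs the bounds handling
   itself.  */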
11597 /* Attempt to generate a tablejump instruction; same concept as try_casesi. */
11598 /* Subroutine of the next function.
11600 INDEX is the value being switched on, with the lowest value
11601 in the table already subtracted.
11602 MODE is its expected mode (needed if INDEX is constant).
11603 RANGE is the length of the jump table.
11604 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11606 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11607 index value is out of range.
11608 DEFAULT_PROBABILITY is the probability of jumping to
11609 the default label. */
11611 static void
11612 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11613 rtx default_label, profile_probability default_probability)
11615 rtx temp, vector;
11617 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11618 cfun->cfg->max_jumptable_ents = INTVAL (range);
11620 /* Do an unsigned comparison (in the proper mode) between the index
11621 expression and the value which represents the length of the range.
11622 Since we just finished subtracting the lower bound of the range
11623 from the index expression, this comparison allows us to simultaneously
11624 check that the original index expression value is both greater than
11625 or equal to the minimum value of the range and less than or equal to
11626 the maximum value of the range. */
11628 if (default_label)
11629 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11630 default_label, default_probability);
11633 /* If index is in range, it must fit in Pmode.
11634 Convert to Pmode so we can index with it. */
11635 if (mode != Pmode)
11636 index = convert_to_mode (Pmode, index, 1);
11638 /* Don't let a MEM slip through, because then INDEX that comes
11639 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11640 and break_out_memory_refs will go to work on it and mess it up. */
11641 #ifdef PIC_CASE_VECTOR_ADDRESS
11642 if (flag_pic && !REG_P (index))
11643 index = copy_to_mode_reg (Pmode, index);
11644 #endif
11646 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11647 GET_MODE_SIZE, because this indicates how large insns are. The other
11648 uses should all be Pmode, because they are addresses. This code
11649 could fail if addresses and insns are not the same size. */
11650 index = simplify_gen_binary (MULT, Pmode, index,
11651 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11652 Pmode));
11653 index = simplify_gen_binary (PLUS, Pmode, index,
11654 gen_rtx_LABEL_REF (Pmode, table_label));
11656 #ifdef PIC_CASE_VECTOR_ADDRESS
11657 if (flag_pic)
11658 index = PIC_CASE_VECTOR_ADDRESS (index);
11659 else
11660 #endif
11661 index = memory_address (CASE_VECTOR_MODE, index);
11662 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11663 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11664 convert_move (temp, vector, 0);
11666 emit_jump_insn (targetm.gen_tablejump (temp, table_label));
11668 /* If we are generating PIC code or if the table is PC-relative, the
11669 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11670 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11671 emit_barrier ();
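/* Editorial sketch, not part of GCC: the bounds check emitted by
   do_tablejump above uses the classic unsigned-compare trick.  Once the
   low bound has been subtracted, a single unsigned comparison against the
   table length rejects values below the low bound (they wrap around to
   huge unsigned numbers) as well as values above the high bound.  The
   helper name is hypothetical.  */

static int
sketch_switch_index_in_range (long val, long low, long high)
{
  unsigned long index = (unsigned long) val - (unsigned long) low;
  unsigned long range = (unsigned long) high - (unsigned long) low;
  return index <= range;
}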
11674 int
11675 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11676 rtx table_label, rtx default_label,
11677 profile_probability default_probability)
11679 rtx index;
11681 if (! targetm.have_tablejump ())
11682 return 0;
11684 index_expr = fold_build2 (MINUS_EXPR, index_type,
11685 fold_convert (index_type, index_expr),
11686 fold_convert (index_type, minval));
11687 index = expand_normal (index_expr);
11688 do_pending_stack_adjust ();
11690 do_tablejump (index, TYPE_MODE (index_type),
11691 convert_modes (TYPE_MODE (index_type),
11692 TYPE_MODE (TREE_TYPE (range)),
11693 expand_normal (range),
11694 TYPE_UNSIGNED (TREE_TYPE (range))),
11695 table_label, default_label, default_probability);
11696 return 1;
11699 /* Return a CONST_VECTOR rtx representing the vector mask for
11700 a VECTOR_CST of booleans. */
11701 static rtx
11702 const_vector_mask_from_tree (tree exp)
11704 rtvec v;
11705 unsigned i;
11706 int units;
11707 tree elt;
11708 machine_mode inner, mode;
11710 mode = TYPE_MODE (TREE_TYPE (exp));
11711 units = GET_MODE_NUNITS (mode);
11712 inner = GET_MODE_INNER (mode);
11714 v = rtvec_alloc (units);
11716 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11718 elt = VECTOR_CST_ELT (exp, i);
11720 gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11721 if (integer_zerop (elt))
11722 RTVEC_ELT (v, i) = CONST0_RTX (inner);
11723 else if (integer_onep (elt)
11724 || integer_minus_onep (elt))
11725 RTVEC_ELT (v, i) = CONSTM1_RTX (inner);
11726 else
11727 gcc_unreachable ();
11730 return gen_rtx_CONST_VECTOR (mode, v);
11733 /* Return a CONST_INT rtx representing the vector mask for
11734 a VECTOR_CST of booleans. */
11735 static rtx
11736 const_scalar_mask_from_tree (tree exp)
11738 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
11739 wide_int res = wi::zero (GET_MODE_PRECISION (mode));
11740 tree elt;
11741 unsigned i;
11743 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11745 elt = VECTOR_CST_ELT (exp, i);
11746 gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11747 if (integer_all_onesp (elt))
11748 res = wi::set_bit (res, i);
11749 else
11750 gcc_assert (integer_zerop (elt));
11753 return immed_wide_int_const (res, mode);
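/* Editorial sketch, not part of GCC: const_scalar_mask_from_tree above
   packs a constant boolean vector into an integer mask, one bit per
   element, the representation used by AVX-512-style mask registers.  A
   plain-C analogue over an array of 0 / nonzero lanes, assuming at most
   64 lanes; the helper name is hypothetical.  */

static unsigned long long
sketch_pack_bool_mask (const int *lanes, unsigned int nlanes)
{
  unsigned long long mask = 0;
  for (unsigned int i = 0; i < nlanes; i++)
    if (lanes[i])			/* all-ones lane -> bit I set */
      mask |= 1ULL << i;
  return mask;
}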
11756 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11757 static rtx
11758 const_vector_from_tree (tree exp)
11760 rtvec v;
11761 unsigned i;
11762 int units;
11763 tree elt;
11764 machine_mode inner, mode;
11766 mode = TYPE_MODE (TREE_TYPE (exp));
11768 if (initializer_zerop (exp))
11769 return CONST0_RTX (mode);
11771 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
11772 return const_vector_mask_from_tree (exp);
11774 units = GET_MODE_NUNITS (mode);
11775 inner = GET_MODE_INNER (mode);
11777 v = rtvec_alloc (units);
11779 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11781 elt = VECTOR_CST_ELT (exp, i);
11783 if (TREE_CODE (elt) == REAL_CST)
11784 RTVEC_ELT (v, i) = const_double_from_real_value (TREE_REAL_CST (elt),
11785 inner);
11786 else if (TREE_CODE (elt) == FIXED_CST)
11787 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11788 inner);
11789 else
11790 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11793 return gen_rtx_CONST_VECTOR (mode, v);
11796 /* Build a decl for a personality function given a language prefix. */
11798 tree
11799 build_personality_function (const char *lang)
11801 const char *unwind_and_version;
11802 tree decl, type;
11803 char *name;
11805 switch (targetm_common.except_unwind_info (&global_options))
11807 case UI_NONE:
11808 return NULL;
11809 case UI_SJLJ:
11810 unwind_and_version = "_sj0";
11811 break;
11812 case UI_DWARF2:
11813 case UI_TARGET:
11814 unwind_and_version = "_v0";
11815 break;
11816 case UI_SEH:
11817 unwind_and_version = "_seh0";
11818 break;
11819 default:
11820 gcc_unreachable ();
11823 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11825 type = build_function_type_list (integer_type_node, integer_type_node,
11826 long_long_unsigned_type_node,
11827 ptr_type_node, ptr_type_node, NULL_TREE);
11828 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11829 get_identifier (name), type);
11830 DECL_ARTIFICIAL (decl) = 1;
11831 DECL_EXTERNAL (decl) = 1;
11832 TREE_PUBLIC (decl) = 1;
11834 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11835 are the flags assigned by targetm.encode_section_info. */
11836 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11838 return decl;
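/* Editorial illustration (not part of GCC): for the C++ front end, which
   passes the prefix "gxx", the function above produces declarations named

     __gxx_personality_v0      DWARF-2 / target unwinding
     __gxx_personality_sj0     setjmp/longjmp unwinding
     __gxx_personality_seh0    Windows SEH unwinding

   i.e. "__" + LANG + "_personality" + the unwind/version suffix selected
   by the switch above.  */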
11841 /* Extracts the personality function of DECL and returns the corresponding
11842 libfunc. */
11844 rtx
11845 get_personality_function (tree decl)
11847 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11848 enum eh_personality_kind pk;
11850 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11851 if (pk == eh_personality_none)
11852 return NULL;
11854 if (!personality
11855 && pk == eh_personality_any)
11856 personality = lang_hooks.eh_personality ();
11858 if (pk == eh_personality_lang)
11859 gcc_assert (personality != NULL_TREE);
11861 return XEXP (DECL_RTL (personality), 0);
11864 /* Returns a tree for the size of EXP in bytes. */
11866 static tree
11867 tree_expr_size (const_tree exp)
11869 if (DECL_P (exp)
11870 && DECL_SIZE_UNIT (exp) != 0)
11871 return DECL_SIZE_UNIT (exp);
11872 else
11873 return size_in_bytes (TREE_TYPE (exp));
11876 /* Return an rtx for the size in bytes of the value of EXP. */
11878 rtx
11879 expr_size (tree exp)
11881 tree size;
11883 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11884 size = TREE_OPERAND (exp, 1);
11885 else
11887 size = tree_expr_size (exp);
11888 gcc_assert (size);
11889 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11892 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11895 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11896 if the size can vary or is larger than an integer. */
11898 static HOST_WIDE_INT
11899 int_expr_size (tree exp)
11901 tree size;
11903 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11904 size = TREE_OPERAND (exp, 1);
11905 else
11907 size = tree_expr_size (exp);
11908 gcc_assert (size);
11911 if (size == 0 || !tree_fits_shwi_p (size))
11912 return -1;
11914 return tree_to_shwi (size);