poly_int: tree constants
[official-gcc.git] / gcc / expr.c
bloba6b236507fa96322effdd2d2b3ee9fe13f07d4e1
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "ssa.h"
32 #include "expmed.h"
33 #include "optabs.h"
34 #include "regs.h"
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "attribs.h"
43 #include "varasm.h"
44 #include "except.h"
45 #include "insn-attr.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "stmt.h"
50 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
51 #include "expr.h"
52 #include "optabs-tree.h"
53 #include "libfuncs.h"
54 #include "reload.h"
55 #include "langhooks.h"
56 #include "common/common-target.h"
57 #include "tree-ssa-live.h"
58 #include "tree-outof-ssa.h"
59 #include "tree-ssa-address.h"
60 #include "builtins.h"
61 #include "tree-chkp.h"
62 #include "rtl-chkp.h"
63 #include "ccmp.h"
66 /* If this is nonzero, we do not bother generating VOLATILE
67 around volatile memory references, and we are willing to
68 output indirect addresses. If cse is to follow, we reject
69 indirect addresses so a useful potential cse is generated;
70 if it is used only once, instruction combination will produce
71 the same indirect address eventually. */
72 int cse_not_expected;
74 static bool block_move_libcall_safe_for_call_parm (void);
75 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
76 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
77 unsigned HOST_WIDE_INT);
78 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
79 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
80 static rtx_insn *compress_float_constant (rtx, rtx);
81 static rtx get_subtarget (rtx);
82 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
83 HOST_WIDE_INT, unsigned HOST_WIDE_INT,
84 unsigned HOST_WIDE_INT, machine_mode,
85 tree, int, alias_set_type, bool);
86 static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool);
87 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
88 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
89 machine_mode, tree, alias_set_type, bool, bool);
91 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
93 static int is_aligning_offset (const_tree, const_tree);
94 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
95 static rtx do_store_flag (sepops, rtx, machine_mode);
96 #ifdef PUSH_ROUNDING
97 static void emit_single_push_insn (machine_mode, rtx, tree);
98 #endif
99 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
100 profile_probability);
101 static rtx const_vector_from_tree (tree);
102 static rtx const_scalar_mask_from_tree (scalar_int_mode, tree);
103 static tree tree_expr_size (const_tree);
104 static HOST_WIDE_INT int_expr_size (tree);
105 static void convert_mode_scalar (rtx, rtx, int);
108 /* This is run to set up which modes can be used
109 directly in memory and to initialize the block move optab. It is run
110 at the beginning of compilation and when the target is reinitialized. */
112 void
113 init_expr_target (void)
115 rtx pat;
116 int num_clobbers;
117 rtx mem, mem1;
118 rtx reg;
120 /* Try indexing by frame ptr and try by stack ptr.
121 It is known that on the Convex the stack ptr isn't a valid index.
122 With luck, one or the other is valid on any machine. */
123 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
124 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
126 /* A scratch register we can modify in-place below to avoid
127 useless RTL allocations. */
128 reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
130 rtx_insn *insn = as_a<rtx_insn *> (rtx_alloc (INSN));
131 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
132 PATTERN (insn) = pat;
134 for (machine_mode mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
135 mode = (machine_mode) ((int) mode + 1))
137 int regno;
139 direct_load[(int) mode] = direct_store[(int) mode] = 0;
140 PUT_MODE (mem, mode);
141 PUT_MODE (mem1, mode);
143 /* See if there is some register that can be used in this mode and
144 directly loaded or stored from memory. */
146 if (mode != VOIDmode && mode != BLKmode)
147 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
148 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
149 regno++)
151 if (!targetm.hard_regno_mode_ok (regno, mode))
152 continue;
154 set_mode_and_regno (reg, mode, regno);
156 SET_SRC (pat) = mem;
157 SET_DEST (pat) = reg;
158 if (recog (pat, insn, &num_clobbers) >= 0)
159 direct_load[(int) mode] = 1;
161 SET_SRC (pat) = mem1;
162 SET_DEST (pat) = reg;
163 if (recog (pat, insn, &num_clobbers) >= 0)
164 direct_load[(int) mode] = 1;
166 SET_SRC (pat) = reg;
167 SET_DEST (pat) = mem;
168 if (recog (pat, insn, &num_clobbers) >= 0)
169 direct_store[(int) mode] = 1;
171 SET_SRC (pat) = reg;
172 SET_DEST (pat) = mem1;
173 if (recog (pat, insn, &num_clobbers) >= 0)
174 direct_store[(int) mode] = 1;
178 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
180 opt_scalar_float_mode mode_iter;
181 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_FLOAT)
183 scalar_float_mode mode = mode_iter.require ();
184 scalar_float_mode srcmode;
185 FOR_EACH_MODE_UNTIL (srcmode, mode)
187 enum insn_code ic;
189 ic = can_extend_p (mode, srcmode, 0);
190 if (ic == CODE_FOR_nothing)
191 continue;
193 PUT_MODE (mem, srcmode);
195 if (insn_operand_matches (ic, 1, mem))
196 float_extend_from_mem[mode][srcmode] = true;
201 /* This is run at the start of compiling a function. */
203 void
204 init_expr (void)
206 memset (&crtl->expr, 0, sizeof (crtl->expr));
209 /* Copy data from FROM to TO, where the machine modes are not the same.
210 Both modes may be integer, or both may be floating, or both may be
211 fixed-point.
212 UNSIGNEDP should be nonzero if FROM is an unsigned type.
213 This causes zero-extension instead of sign-extension. */
215 void
216 convert_move (rtx to, rtx from, int unsignedp)
218 machine_mode to_mode = GET_MODE (to);
219 machine_mode from_mode = GET_MODE (from);
221 gcc_assert (to_mode != BLKmode);
222 gcc_assert (from_mode != BLKmode);
224 /* If the source and destination are already the same, then there's
225 nothing to do. */
226 if (to == from)
227 return;
229 /* If FROM is a SUBREG that indicates that we have already done at least
230 the required extension, strip it. We don't handle such SUBREGs as
231 TO here. */
233 scalar_int_mode to_int_mode;
234 if (GET_CODE (from) == SUBREG
235 && SUBREG_PROMOTED_VAR_P (from)
236 && is_a <scalar_int_mode> (to_mode, &to_int_mode)
237 && (GET_MODE_PRECISION (subreg_promoted_mode (from))
238 >= GET_MODE_PRECISION (to_int_mode))
239 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
240 from = gen_lowpart (to_int_mode, from), from_mode = to_int_mode;
242 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
244 if (to_mode == from_mode
245 || (from_mode == VOIDmode && CONSTANT_P (from)))
247 emit_move_insn (to, from);
248 return;
251 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
253 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
255 if (VECTOR_MODE_P (to_mode))
256 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
257 else
258 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
260 emit_move_insn (to, from);
261 return;
264 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
266 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
267 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
268 return;
271 convert_mode_scalar (to, from, unsignedp);
274 /* Like convert_move, but deals only with scalar modes. */
276 static void
277 convert_mode_scalar (rtx to, rtx from, int unsignedp)
279 /* Both modes should be scalar types. */
280 scalar_mode from_mode = as_a <scalar_mode> (GET_MODE (from));
281 scalar_mode to_mode = as_a <scalar_mode> (GET_MODE (to));
282 bool to_real = SCALAR_FLOAT_MODE_P (to_mode);
283 bool from_real = SCALAR_FLOAT_MODE_P (from_mode);
284 enum insn_code code;
285 rtx libcall;
287 gcc_assert (to_real == from_real);
289 /* rtx code for making an equivalent value. */
290 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
291 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
293 if (to_real)
295 rtx value;
296 rtx_insn *insns;
297 convert_optab tab;
299 gcc_assert ((GET_MODE_PRECISION (from_mode)
300 != GET_MODE_PRECISION (to_mode))
301 || (DECIMAL_FLOAT_MODE_P (from_mode)
302 != DECIMAL_FLOAT_MODE_P (to_mode)));
304 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
305 /* Conversion between decimal float and binary float, same size. */
306 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
307 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
308 tab = sext_optab;
309 else
310 tab = trunc_optab;
312 /* Try converting directly if the insn is supported. */
314 code = convert_optab_handler (tab, to_mode, from_mode);
315 if (code != CODE_FOR_nothing)
317 emit_unop_insn (code, to, from,
318 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
319 return;
322 /* Otherwise use a libcall. */
323 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
325 /* Is this conversion implemented yet? */
326 gcc_assert (libcall);
328 start_sequence ();
329 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
330 from, from_mode);
331 insns = get_insns ();
332 end_sequence ();
333 emit_libcall_block (insns, to, value,
334 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
335 from)
336 : gen_rtx_FLOAT_EXTEND (to_mode, from));
337 return;
340 /* Handle pointer conversion. */ /* SPEE 900220. */
341 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
343 convert_optab ctab;
345 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
346 ctab = trunc_optab;
347 else if (unsignedp)
348 ctab = zext_optab;
349 else
350 ctab = sext_optab;
352 if (convert_optab_handler (ctab, to_mode, from_mode)
353 != CODE_FOR_nothing)
355 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
356 to, from, UNKNOWN);
357 return;
361 /* Targets are expected to provide conversion insns between PxImode and
362 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
363 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
365 scalar_int_mode full_mode
366 = smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));
368 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
369 != CODE_FOR_nothing);
371 if (full_mode != from_mode)
372 from = convert_to_mode (full_mode, from, unsignedp);
373 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
374 to, from, UNKNOWN);
375 return;
377 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
379 rtx new_from;
380 scalar_int_mode full_mode
381 = smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
382 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
383 enum insn_code icode;
385 icode = convert_optab_handler (ctab, full_mode, from_mode);
386 gcc_assert (icode != CODE_FOR_nothing);
388 if (to_mode == full_mode)
390 emit_unop_insn (icode, to, from, UNKNOWN);
391 return;
394 new_from = gen_reg_rtx (full_mode);
395 emit_unop_insn (icode, new_from, from, UNKNOWN);
397 /* else proceed to integer conversions below. */
398 from_mode = full_mode;
399 from = new_from;
402 /* Make sure both are fixed-point modes or both are not. */
403 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
404 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
405 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
407 /* If we widen from_mode to to_mode and they are in the same class,
408 we won't saturate the result.
409 Otherwise, always saturate the result to play safe. */
410 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
411 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
412 expand_fixed_convert (to, from, 0, 0);
413 else
414 expand_fixed_convert (to, from, 0, 1);
415 return;
418 /* Now both modes are integers. */
420 /* Handle expanding beyond a word. */
421 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
422 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
424 rtx_insn *insns;
425 rtx lowpart;
426 rtx fill_value;
427 rtx lowfrom;
428 int i;
429 scalar_mode lowpart_mode;
430 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
432 /* Try converting directly if the insn is supported. */
433 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
434 != CODE_FOR_nothing)
436 /* If FROM is a SUBREG, put it into a register. Do this
437 so that we always generate the same set of insns for
438 better cse'ing; if an intermediate assignment occurred,
439 we won't be doing the operation directly on the SUBREG. */
440 if (optimize > 0 && GET_CODE (from) == SUBREG)
441 from = force_reg (from_mode, from);
442 emit_unop_insn (code, to, from, equiv_code);
443 return;
445 /* Next, try converting via full word. */
446 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
447 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
448 != CODE_FOR_nothing))
450 rtx word_to = gen_reg_rtx (word_mode);
451 if (REG_P (to))
453 if (reg_overlap_mentioned_p (to, from))
454 from = force_reg (from_mode, from);
455 emit_clobber (to);
457 convert_move (word_to, from, unsignedp);
458 emit_unop_insn (code, to, word_to, equiv_code);
459 return;
462 /* No special multiword conversion insn; do it by hand. */
463 start_sequence ();
465 /* Since we will turn this into a no conflict block, we must ensure
466 the source does not overlap the target so force it into an isolated
467 register when maybe so. Likewise for any MEM input, since the
468 conversion sequence might require several references to it and we
469 must ensure we're getting the same value every time. */
471 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
472 from = force_reg (from_mode, from);
474 /* Get a copy of FROM widened to a word, if necessary. */
475 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
476 lowpart_mode = word_mode;
477 else
478 lowpart_mode = from_mode;
480 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
482 lowpart = gen_lowpart (lowpart_mode, to);
483 emit_move_insn (lowpart, lowfrom);
485 /* Compute the value to put in each remaining word. */
486 if (unsignedp)
487 fill_value = const0_rtx;
488 else
489 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
490 LT, lowfrom, const0_rtx,
491 lowpart_mode, 0, -1);
493 /* Fill the remaining words. */
494 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
496 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
497 rtx subword = operand_subword (to, index, 1, to_mode);
499 gcc_assert (subword);
501 if (fill_value != subword)
502 emit_move_insn (subword, fill_value);
505 insns = get_insns ();
506 end_sequence ();
508 emit_insn (insns);
509 return;
512 /* Truncating multi-word to a word or less. */
513 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
514 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
516 if (!((MEM_P (from)
517 && ! MEM_VOLATILE_P (from)
518 && direct_load[(int) to_mode]
519 && ! mode_dependent_address_p (XEXP (from, 0),
520 MEM_ADDR_SPACE (from)))
521 || REG_P (from)
522 || GET_CODE (from) == SUBREG))
523 from = force_reg (from_mode, from);
524 convert_move (to, gen_lowpart (word_mode, from), 0);
525 return;
528 /* Now follow all the conversions between integers
529 no more than a word long. */
531 /* For truncation, usually we can just refer to FROM in a narrower mode. */
532 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
533 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
535 if (!((MEM_P (from)
536 && ! MEM_VOLATILE_P (from)
537 && direct_load[(int) to_mode]
538 && ! mode_dependent_address_p (XEXP (from, 0),
539 MEM_ADDR_SPACE (from)))
540 || REG_P (from)
541 || GET_CODE (from) == SUBREG))
542 from = force_reg (from_mode, from);
543 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
544 && !targetm.hard_regno_mode_ok (REGNO (from), to_mode))
545 from = copy_to_reg (from);
546 emit_move_insn (to, gen_lowpart (to_mode, from));
547 return;
550 /* Handle extension. */
551 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
553 /* Convert directly if that works. */
554 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
555 != CODE_FOR_nothing)
557 emit_unop_insn (code, to, from, equiv_code);
558 return;
560 else
562 scalar_mode intermediate;
563 rtx tmp;
564 int shift_amount;
566 /* Search for a mode to convert via. */
567 opt_scalar_mode intermediate_iter;
568 FOR_EACH_MODE_FROM (intermediate_iter, from_mode)
570 scalar_mode intermediate = intermediate_iter.require ();
571 if (((can_extend_p (to_mode, intermediate, unsignedp)
572 != CODE_FOR_nothing)
573 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
574 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode,
575 intermediate)))
576 && (can_extend_p (intermediate, from_mode, unsignedp)
577 != CODE_FOR_nothing))
579 convert_move (to, convert_to_mode (intermediate, from,
580 unsignedp), unsignedp);
581 return;
585 /* No suitable intermediate mode.
586 Generate what we need with shifts. */
587 shift_amount = (GET_MODE_PRECISION (to_mode)
588 - GET_MODE_PRECISION (from_mode));
589 from = gen_lowpart (to_mode, force_reg (from_mode, from));
590 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
591 to, unsignedp);
592 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
593 to, unsignedp);
594 if (tmp != to)
595 emit_move_insn (to, tmp);
596 return;
600 /* Support special truncate insns for certain modes. */
601 if (convert_optab_handler (trunc_optab, to_mode,
602 from_mode) != CODE_FOR_nothing)
604 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
605 to, from, UNKNOWN);
606 return;
609 /* Handle truncation of volatile memrefs, and so on;
610 the things that couldn't be truncated directly,
611 and for which there was no special instruction.
613 ??? Code above formerly short-circuited this, for most integer
614 mode pairs, with a force_reg in from_mode followed by a recursive
615 call to this routine. Appears always to have been wrong. */
616 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
618 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
619 emit_move_insn (to, temp);
620 return;
623 /* Mode combination is not recognized. */
624 gcc_unreachable ();
627 /* Return an rtx for a value that would result
628 from converting X to mode MODE.
629 Both X and MODE may be floating, or both integer.
630 UNSIGNEDP is nonzero if X is an unsigned value.
631 This can be done by referring to a part of X in place
632 or by copying to a new temporary with conversion. */
635 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
637 return convert_modes (mode, VOIDmode, x, unsignedp);
640 /* Return an rtx for a value that would result
641 from converting X from mode OLDMODE to mode MODE.
642 Both modes may be floating, or both integer.
643 UNSIGNEDP is nonzero if X is an unsigned value.
645 This can be done by referring to a part of X in place
646 or by copying to a new temporary with conversion.
648 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
651 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
653 rtx temp;
654 scalar_int_mode int_mode;
656 /* If FROM is a SUBREG that indicates that we have already done at least
657 the required extension, strip it. */
659 if (GET_CODE (x) == SUBREG
660 && SUBREG_PROMOTED_VAR_P (x)
661 && is_a <scalar_int_mode> (mode, &int_mode)
662 && (GET_MODE_PRECISION (subreg_promoted_mode (x))
663 >= GET_MODE_PRECISION (int_mode))
664 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
665 x = gen_lowpart (int_mode, SUBREG_REG (x));
667 if (GET_MODE (x) != VOIDmode)
668 oldmode = GET_MODE (x);
670 if (mode == oldmode)
671 return x;
673 if (CONST_SCALAR_INT_P (x)
674 && is_int_mode (mode, &int_mode))
676 /* If the caller did not tell us the old mode, then there is not
677 much to do with respect to canonicalization. We have to
678 assume that all the bits are significant. */
679 if (GET_MODE_CLASS (oldmode) != MODE_INT)
680 oldmode = MAX_MODE_INT;
681 wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
682 GET_MODE_PRECISION (int_mode),
683 unsignedp ? UNSIGNED : SIGNED);
684 return immed_wide_int_const (w, int_mode);
687 /* We can do this with a gen_lowpart if both desired and current modes
688 are integer, and this is either a constant integer, a register, or a
689 non-volatile MEM. */
690 scalar_int_mode int_oldmode;
691 if (is_int_mode (mode, &int_mode)
692 && is_int_mode (oldmode, &int_oldmode)
693 && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode)
694 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode])
695 || CONST_POLY_INT_P (x)
696 || (REG_P (x)
697 && (!HARD_REGISTER_P (x)
698 || targetm.hard_regno_mode_ok (REGNO (x), int_mode))
699 && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x)))))
700 return gen_lowpart (int_mode, x);
702 /* Converting from integer constant into mode is always equivalent to an
703 subreg operation. */
704 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
706 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
707 return simplify_gen_subreg (mode, x, oldmode, 0);
710 temp = gen_reg_rtx (mode);
711 convert_move (temp, x, unsignedp);
712 return temp;
715 /* Return the largest alignment we can use for doing a move (or store)
716 of MAX_PIECES. ALIGN is the largest alignment we could use. */
718 static unsigned int
719 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
721 scalar_int_mode tmode
722 = int_mode_for_size (max_pieces * BITS_PER_UNIT, 1).require ();
724 if (align >= GET_MODE_ALIGNMENT (tmode))
725 align = GET_MODE_ALIGNMENT (tmode);
726 else
728 scalar_int_mode xmode = NARROWEST_INT_MODE;
729 opt_scalar_int_mode mode_iter;
730 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
732 tmode = mode_iter.require ();
733 if (GET_MODE_SIZE (tmode) > max_pieces
734 || targetm.slow_unaligned_access (tmode, align))
735 break;
736 xmode = tmode;
739 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
742 return align;
745 /* Return the widest integer mode that is narrower than SIZE bytes. */
747 static scalar_int_mode
748 widest_int_mode_for_size (unsigned int size)
750 scalar_int_mode result = NARROWEST_INT_MODE;
752 gcc_checking_assert (size > 1);
754 opt_scalar_int_mode tmode;
755 FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
756 if (GET_MODE_SIZE (tmode.require ()) < size)
757 result = tmode.require ();
759 return result;
762 /* Determine whether an operation OP on LEN bytes with alignment ALIGN can
763 and should be performed piecewise. */
765 static bool
766 can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
767 enum by_pieces_operation op)
769 return targetm.use_by_pieces_infrastructure_p (len, align, op,
770 optimize_insn_for_speed_p ());
773 /* Determine whether the LEN bytes can be moved by using several move
774 instructions. Return nonzero if a call to move_by_pieces should
775 succeed. */
777 bool
778 can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
780 return can_do_by_pieces (len, align, MOVE_BY_PIECES);
783 /* Return number of insns required to perform operation OP by pieces
784 for L bytes. ALIGN (in bits) is maximum alignment we can assume. */
786 unsigned HOST_WIDE_INT
787 by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
788 unsigned int max_size, by_pieces_operation op)
790 unsigned HOST_WIDE_INT n_insns = 0;
792 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
794 while (max_size > 1 && l > 0)
796 scalar_int_mode mode = widest_int_mode_for_size (max_size);
797 enum insn_code icode;
799 unsigned int modesize = GET_MODE_SIZE (mode);
801 icode = optab_handler (mov_optab, mode);
802 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
804 unsigned HOST_WIDE_INT n_pieces = l / modesize;
805 l %= modesize;
806 switch (op)
808 default:
809 n_insns += n_pieces;
810 break;
812 case COMPARE_BY_PIECES:
813 int batch = targetm.compare_by_pieces_branch_ratio (mode);
814 int batch_ops = 4 * batch - 1;
815 unsigned HOST_WIDE_INT full = n_pieces / batch;
816 n_insns += full * batch_ops;
817 if (n_pieces % batch != 0)
818 n_insns++;
819 break;
823 max_size = modesize;
826 gcc_assert (!l);
827 return n_insns;
830 /* Used when performing piecewise block operations, holds information
831 about one of the memory objects involved. The member functions
832 can be used to generate code for loading from the object and
833 updating the address when iterating. */
835 class pieces_addr
837 /* The object being referenced, a MEM. Can be NULL_RTX to indicate
838 stack pushes. */
839 rtx m_obj;
840 /* The address of the object. Can differ from that seen in the
841 MEM rtx if we copied the address to a register. */
842 rtx m_addr;
843 /* Nonzero if the address on the object has an autoincrement already,
844 signifies whether that was an increment or decrement. */
845 signed char m_addr_inc;
846 /* Nonzero if we intend to use autoinc without the address already
847 having autoinc form. We will insert add insns around each memory
848 reference, expecting later passes to form autoinc addressing modes.
849 The only supported options are predecrement and postincrement. */
850 signed char m_explicit_inc;
851 /* True if we have either of the two possible cases of using
852 autoincrement. */
853 bool m_auto;
854 /* True if this is an address to be used for load operations rather
855 than stores. */
856 bool m_is_load;
858 /* Optionally, a function to obtain constants for any given offset into
859 the objects, and data associated with it. */
860 by_pieces_constfn m_constfn;
861 void *m_cfndata;
862 public:
863 pieces_addr (rtx, bool, by_pieces_constfn, void *);
864 rtx adjust (scalar_int_mode, HOST_WIDE_INT);
865 void increment_address (HOST_WIDE_INT);
866 void maybe_predec (HOST_WIDE_INT);
867 void maybe_postinc (HOST_WIDE_INT);
868 void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
869 int get_addr_inc ()
871 return m_addr_inc;
875 /* Initialize a pieces_addr structure from an object OBJ. IS_LOAD is
876 true if the operation to be performed on this object is a load
877 rather than a store. For stores, OBJ can be NULL, in which case we
878 assume the operation is a stack push. For loads, the optional
879 CONSTFN and its associated CFNDATA can be used in place of the
880 memory load. */
882 pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
883 void *cfndata)
884 : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
886 m_addr_inc = 0;
887 m_auto = false;
888 if (obj)
890 rtx addr = XEXP (obj, 0);
891 rtx_code code = GET_CODE (addr);
892 m_addr = addr;
893 bool dec = code == PRE_DEC || code == POST_DEC;
894 bool inc = code == PRE_INC || code == POST_INC;
895 m_auto = inc || dec;
896 if (m_auto)
897 m_addr_inc = dec ? -1 : 1;
899 /* While we have always looked for these codes here, the code
900 implementing the memory operation has never handled them.
901 Support could be added later if necessary or beneficial. */
902 gcc_assert (code != PRE_INC && code != POST_DEC);
904 else
906 m_addr = NULL_RTX;
907 if (!is_load)
909 m_auto = true;
910 if (STACK_GROWS_DOWNWARD)
911 m_addr_inc = -1;
912 else
913 m_addr_inc = 1;
915 else
916 gcc_assert (constfn != NULL);
918 m_explicit_inc = 0;
919 if (constfn)
920 gcc_assert (is_load);
923 /* Decide whether to use autoinc for an address involved in a memory op.
924 MODE is the mode of the accesses, REVERSE is true if we've decided to
925 perform the operation starting from the end, and LEN is the length of
926 the operation. Don't override an earlier decision to set m_auto. */
928 void
929 pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
930 HOST_WIDE_INT len)
932 if (m_auto || m_obj == NULL_RTX)
933 return;
935 bool use_predec = (m_is_load
936 ? USE_LOAD_PRE_DECREMENT (mode)
937 : USE_STORE_PRE_DECREMENT (mode));
938 bool use_postinc = (m_is_load
939 ? USE_LOAD_POST_INCREMENT (mode)
940 : USE_STORE_POST_INCREMENT (mode));
941 machine_mode addr_mode = get_address_mode (m_obj);
943 if (use_predec && reverse)
945 m_addr = copy_to_mode_reg (addr_mode,
946 plus_constant (addr_mode,
947 m_addr, len));
948 m_auto = true;
949 m_explicit_inc = -1;
951 else if (use_postinc && !reverse)
953 m_addr = copy_to_mode_reg (addr_mode, m_addr);
954 m_auto = true;
955 m_explicit_inc = 1;
957 else if (CONSTANT_P (m_addr))
958 m_addr = copy_to_mode_reg (addr_mode, m_addr);
961 /* Adjust the address to refer to the data at OFFSET in MODE. If we
962 are using autoincrement for this address, we don't add the offset,
963 but we still modify the MEM's properties. */
966 pieces_addr::adjust (scalar_int_mode mode, HOST_WIDE_INT offset)
968 if (m_constfn)
969 return m_constfn (m_cfndata, offset, mode);
970 if (m_obj == NULL_RTX)
971 return NULL_RTX;
972 if (m_auto)
973 return adjust_automodify_address (m_obj, mode, m_addr, offset);
974 else
975 return adjust_address (m_obj, mode, offset);
978 /* Emit an add instruction to increment the address by SIZE. */
980 void
981 pieces_addr::increment_address (HOST_WIDE_INT size)
983 rtx amount = gen_int_mode (size, GET_MODE (m_addr));
984 emit_insn (gen_add2_insn (m_addr, amount));
987 /* If we are supposed to decrement the address after each access, emit code
988 to do so now. Increment by SIZE (which has should have the correct sign
989 already). */
991 void
992 pieces_addr::maybe_predec (HOST_WIDE_INT size)
994 if (m_explicit_inc >= 0)
995 return;
996 gcc_assert (HAVE_PRE_DECREMENT);
997 increment_address (size);
1000 /* If we are supposed to decrement the address after each access, emit code
1001 to do so now. Increment by SIZE. */
1003 void
1004 pieces_addr::maybe_postinc (HOST_WIDE_INT size)
1006 if (m_explicit_inc <= 0)
1007 return;
1008 gcc_assert (HAVE_POST_INCREMENT);
1009 increment_address (size);
1012 /* This structure is used by do_op_by_pieces to describe the operation
1013 to be performed. */
1015 class op_by_pieces_d
1017 protected:
1018 pieces_addr m_to, m_from;
1019 unsigned HOST_WIDE_INT m_len;
1020 HOST_WIDE_INT m_offset;
1021 unsigned int m_align;
1022 unsigned int m_max_size;
1023 bool m_reverse;
1025 /* Virtual functions, overriden by derived classes for the specific
1026 operation. */
1027 virtual void generate (rtx, rtx, machine_mode) = 0;
1028 virtual bool prepare_mode (machine_mode, unsigned int) = 0;
1029 virtual void finish_mode (machine_mode)
1033 public:
1034 op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
1035 unsigned HOST_WIDE_INT, unsigned int);
1036 void run ();
1039 /* The constructor for an op_by_pieces_d structure. We require two
1040 objects named TO and FROM, which are identified as loads or stores
1041 by TO_LOAD and FROM_LOAD. If FROM is a load, the optional FROM_CFN
1042 and its associated FROM_CFN_DATA can be used to replace loads with
1043 constant values. LEN describes the length of the operation. */
1045 op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
1046 rtx from, bool from_load,
1047 by_pieces_constfn from_cfn,
1048 void *from_cfn_data,
1049 unsigned HOST_WIDE_INT len,
1050 unsigned int align)
1051 : m_to (to, to_load, NULL, NULL),
1052 m_from (from, from_load, from_cfn, from_cfn_data),
1053 m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
1055 int toi = m_to.get_addr_inc ();
1056 int fromi = m_from.get_addr_inc ();
1057 if (toi >= 0 && fromi >= 0)
1058 m_reverse = false;
1059 else if (toi <= 0 && fromi <= 0)
1060 m_reverse = true;
1061 else
1062 gcc_unreachable ();
1064 m_offset = m_reverse ? len : 0;
1065 align = MIN (to ? MEM_ALIGN (to) : align,
1066 from ? MEM_ALIGN (from) : align);
1068 /* If copying requires more than two move insns,
1069 copy addresses to registers (to make displacements shorter)
1070 and use post-increment if available. */
1071 if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
1073 /* Find the mode of the largest comparison. */
1074 scalar_int_mode mode = widest_int_mode_for_size (m_max_size);
1076 m_from.decide_autoinc (mode, m_reverse, len);
1077 m_to.decide_autoinc (mode, m_reverse, len);
1080 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1081 m_align = align;
1084 /* This function contains the main loop used for expanding a block
1085 operation. First move what we can in the largest integer mode,
1086 then go to successively smaller modes. For every access, call
1087 GENFUN with the two operands and the EXTRA_DATA. */
1089 void
1090 op_by_pieces_d::run ()
1092 while (m_max_size > 1 && m_len > 0)
1094 scalar_int_mode mode = widest_int_mode_for_size (m_max_size);
1096 if (prepare_mode (mode, m_align))
1098 unsigned int size = GET_MODE_SIZE (mode);
1099 rtx to1 = NULL_RTX, from1;
1101 while (m_len >= size)
1103 if (m_reverse)
1104 m_offset -= size;
1106 to1 = m_to.adjust (mode, m_offset);
1107 from1 = m_from.adjust (mode, m_offset);
1109 m_to.maybe_predec (-(HOST_WIDE_INT)size);
1110 m_from.maybe_predec (-(HOST_WIDE_INT)size);
1112 generate (to1, from1, mode);
1114 m_to.maybe_postinc (size);
1115 m_from.maybe_postinc (size);
1117 if (!m_reverse)
1118 m_offset += size;
1120 m_len -= size;
1123 finish_mode (mode);
1126 m_max_size = GET_MODE_SIZE (mode);
1129 /* The code above should have handled everything. */
1130 gcc_assert (!m_len);
1133 /* Derived class from op_by_pieces_d, providing support for block move
1134 operations. */
1136 class move_by_pieces_d : public op_by_pieces_d
1138 insn_gen_fn m_gen_fun;
1139 void generate (rtx, rtx, machine_mode);
1140 bool prepare_mode (machine_mode, unsigned int);
1142 public:
1143 move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1144 unsigned int align)
1145 : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
1148 rtx finish_endp (int);
1151 /* Return true if MODE can be used for a set of copies, given an
1152 alignment ALIGN. Prepare whatever data is necessary for later
1153 calls to generate. */
1155 bool
1156 move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1158 insn_code icode = optab_handler (mov_optab, mode);
1159 m_gen_fun = GEN_FCN (icode);
1160 return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1163 /* A callback used when iterating for a compare_by_pieces_operation.
1164 OP0 and OP1 are the values that have been loaded and should be
1165 compared in MODE. If OP0 is NULL, this means we should generate a
1166 push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
1167 gen function that should be used to generate the mode. */
1169 void
1170 move_by_pieces_d::generate (rtx op0, rtx op1,
1171 machine_mode mode ATTRIBUTE_UNUSED)
1173 #ifdef PUSH_ROUNDING
1174 if (op0 == NULL_RTX)
1176 emit_single_push_insn (mode, op1, NULL);
1177 return;
1179 #endif
1180 emit_insn (m_gen_fun (op0, op1));
1183 /* Perform the final adjustment at the end of a string to obtain the
1184 correct return value for the block operation. If ENDP is 1 return
1185 memory at the end ala mempcpy, and if ENDP is 2 return memory the
1186 end minus one byte ala stpcpy. */
1189 move_by_pieces_d::finish_endp (int endp)
1191 gcc_assert (!m_reverse);
1192 if (endp == 2)
1194 m_to.maybe_postinc (-1);
1195 --m_offset;
1197 return m_to.adjust (QImode, m_offset);
1200 /* Generate several move instructions to copy LEN bytes from block FROM to
1201 block TO. (These are MEM rtx's with BLKmode).
1203 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1204 used to push FROM to the stack.
1206 ALIGN is maximum stack alignment we can assume.
1208 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1209 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1210 stpcpy. */
1213 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1214 unsigned int align, int endp)
1216 #ifndef PUSH_ROUNDING
1217 if (to == NULL)
1218 gcc_unreachable ();
1219 #endif
1221 move_by_pieces_d data (to, from, len, align);
1223 data.run ();
1225 if (endp)
1226 return data.finish_endp (endp);
1227 else
1228 return to;
1231 /* Derived class from op_by_pieces_d, providing support for block move
1232 operations. */
1234 class store_by_pieces_d : public op_by_pieces_d
1236 insn_gen_fn m_gen_fun;
1237 void generate (rtx, rtx, machine_mode);
1238 bool prepare_mode (machine_mode, unsigned int);
1240 public:
1241 store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
1242 unsigned HOST_WIDE_INT len, unsigned int align)
1243 : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
1246 rtx finish_endp (int);
1249 /* Return true if MODE can be used for a set of stores, given an
1250 alignment ALIGN. Prepare whatever data is necessary for later
1251 calls to generate. */
1253 bool
1254 store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1256 insn_code icode = optab_handler (mov_optab, mode);
1257 m_gen_fun = GEN_FCN (icode);
1258 return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1261 /* A callback used when iterating for a store_by_pieces_operation.
1262 OP0 and OP1 are the values that have been loaded and should be
1263 compared in MODE. If OP0 is NULL, this means we should generate a
1264 push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
1265 gen function that should be used to generate the mode. */
1267 void
1268 store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
1270 emit_insn (m_gen_fun (op0, op1));
1273 /* Perform the final adjustment at the end of a string to obtain the
1274 correct return value for the block operation. If ENDP is 1 return
1275 memory at the end ala mempcpy, and if ENDP is 2 return memory the
1276 end minus one byte ala stpcpy. */
1279 store_by_pieces_d::finish_endp (int endp)
1281 gcc_assert (!m_reverse);
1282 if (endp == 2)
1284 m_to.maybe_postinc (-1);
1285 --m_offset;
1287 return m_to.adjust (QImode, m_offset);
1290 /* Determine whether the LEN bytes generated by CONSTFUN can be
1291 stored to memory using several move instructions. CONSTFUNDATA is
1292 a pointer which will be passed as argument in every CONSTFUN call.
1293 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
1294 a memset operation and false if it's a copy of a constant string.
1295 Return nonzero if a call to store_by_pieces should succeed. */
1298 can_store_by_pieces (unsigned HOST_WIDE_INT len,
1299 rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
1300 void *constfundata, unsigned int align, bool memsetp)
1302 unsigned HOST_WIDE_INT l;
1303 unsigned int max_size;
1304 HOST_WIDE_INT offset = 0;
1305 enum insn_code icode;
1306 int reverse;
1307 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
1308 rtx cst ATTRIBUTE_UNUSED;
1310 if (len == 0)
1311 return 1;
1313 if (!targetm.use_by_pieces_infrastructure_p (len, align,
1314 memsetp
1315 ? SET_BY_PIECES
1316 : STORE_BY_PIECES,
1317 optimize_insn_for_speed_p ()))
1318 return 0;
1320 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
1322 /* We would first store what we can in the largest integer mode, then go to
1323 successively smaller modes. */
1325 for (reverse = 0;
1326 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
1327 reverse++)
1329 l = len;
1330 max_size = STORE_MAX_PIECES + 1;
1331 while (max_size > 1 && l > 0)
1333 scalar_int_mode mode = widest_int_mode_for_size (max_size);
1335 icode = optab_handler (mov_optab, mode);
1336 if (icode != CODE_FOR_nothing
1337 && align >= GET_MODE_ALIGNMENT (mode))
1339 unsigned int size = GET_MODE_SIZE (mode);
1341 while (l >= size)
1343 if (reverse)
1344 offset -= size;
1346 cst = (*constfun) (constfundata, offset, mode);
1347 if (!targetm.legitimate_constant_p (mode, cst))
1348 return 0;
1350 if (!reverse)
1351 offset += size;
1353 l -= size;
1357 max_size = GET_MODE_SIZE (mode);
1360 /* The code above should have handled everything. */
1361 gcc_assert (!l);
1364 return 1;
1367 /* Generate several move instructions to store LEN bytes generated by
1368 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
1369 pointer which will be passed as argument in every CONSTFUN call.
1370 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
1371 a memset operation and false if it's a copy of a constant string.
1372 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1373 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1374 stpcpy. */
1377 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
1378 rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
1379 void *constfundata, unsigned int align, bool memsetp, int endp)
1381 if (len == 0)
1383 gcc_assert (endp != 2);
1384 return to;
1387 gcc_assert (targetm.use_by_pieces_infrastructure_p
1388 (len, align,
1389 memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
1390 optimize_insn_for_speed_p ()));
1392 store_by_pieces_d data (to, constfun, constfundata, len, align);
1393 data.run ();
1395 if (endp)
1396 return data.finish_endp (endp);
1397 else
1398 return to;
1401 /* Callback routine for clear_by_pieces.
1402 Return const0_rtx unconditionally. */
1404 static rtx
1405 clear_by_pieces_1 (void *, HOST_WIDE_INT, scalar_int_mode)
1407 return const0_rtx;
1410 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
1411 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
1413 static void
1414 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
1416 if (len == 0)
1417 return;
1419 store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
1420 data.run ();
1423 /* Context used by compare_by_pieces_genfn. It stores the fail label
1424 to jump to in case of miscomparison, and for branch ratios greater than 1,
1425 it stores an accumulator and the current and maximum counts before
1426 emitting another branch. */
1428 class compare_by_pieces_d : public op_by_pieces_d
1430 rtx_code_label *m_fail_label;
1431 rtx m_accumulator;
1432 int m_count, m_batch;
1434 void generate (rtx, rtx, machine_mode);
1435 bool prepare_mode (machine_mode, unsigned int);
1436 void finish_mode (machine_mode);
1437 public:
1438 compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
1439 void *op1_cfn_data, HOST_WIDE_INT len, int align,
1440 rtx_code_label *fail_label)
1441 : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
1443 m_fail_label = fail_label;
1447 /* A callback used when iterating for a compare_by_pieces_operation.
1448 OP0 and OP1 are the values that have been loaded and should be
1449 compared in MODE. DATA holds a pointer to the compare_by_pieces_data
1450 context structure. */
1452 void
1453 compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
1455 if (m_batch > 1)
1457 rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
1458 true, OPTAB_LIB_WIDEN);
1459 if (m_count != 0)
1460 temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
1461 true, OPTAB_LIB_WIDEN);
1462 m_accumulator = temp;
1464 if (++m_count < m_batch)
1465 return;
1467 m_count = 0;
1468 op0 = m_accumulator;
1469 op1 = const0_rtx;
1470 m_accumulator = NULL_RTX;
1472 do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
1473 m_fail_label, profile_probability::uninitialized ());
1476 /* Return true if MODE can be used for a set of moves and comparisons,
1477 given an alignment ALIGN. Prepare whatever data is necessary for
1478 later calls to generate. */
1480 bool
1481 compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1483 insn_code icode = optab_handler (mov_optab, mode);
1484 if (icode == CODE_FOR_nothing
1485 || align < GET_MODE_ALIGNMENT (mode)
1486 || !can_compare_p (EQ, mode, ccp_jump))
1487 return false;
1488 m_batch = targetm.compare_by_pieces_branch_ratio (mode);
1489 if (m_batch < 0)
1490 return false;
1491 m_accumulator = NULL_RTX;
1492 m_count = 0;
1493 return true;
1496 /* Called after expanding a series of comparisons in MODE. If we have
1497 accumulated results for which we haven't emitted a branch yet, do
1498 so now. */
1500 void
1501 compare_by_pieces_d::finish_mode (machine_mode mode)
1503 if (m_accumulator != NULL_RTX)
1504 do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
1505 NULL_RTX, NULL, m_fail_label,
1506 profile_probability::uninitialized ());
1509 /* Generate several move instructions to compare LEN bytes from blocks
1510 ARG0 and ARG1. (These are MEM rtx's with BLKmode).
1512 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1513 used to push FROM to the stack.
1515 ALIGN is maximum stack alignment we can assume.
1517 Optionally, the caller can pass a constfn and associated data in A1_CFN
1518 and A1_CFN_DATA. describing that the second operand being compared is a
1519 known constant and how to obtain its data. */
1521 static rtx
1522 compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
1523 rtx target, unsigned int align,
1524 by_pieces_constfn a1_cfn, void *a1_cfn_data)
1526 rtx_code_label *fail_label = gen_label_rtx ();
1527 rtx_code_label *end_label = gen_label_rtx ();
1529 if (target == NULL_RTX
1530 || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
1531 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
1533 compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
1534 fail_label);
1536 data.run ();
1538 emit_move_insn (target, const0_rtx);
1539 emit_jump (end_label);
1540 emit_barrier ();
1541 emit_label (fail_label);
1542 emit_move_insn (target, const1_rtx);
1543 emit_label (end_label);
1545 return target;
1548 /* Emit code to move a block Y to a block X. This may be done with
1549 string-move instructions, with multiple scalar move instructions,
1550 or with a library call.
1552 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1553 SIZE is an rtx that says how long they are.
1554 ALIGN is the maximum alignment we can assume they have.
1555 METHOD describes what kind of copy this is, and what mechanisms may be used.
1556 MIN_SIZE is the minimal size of block to move
1557 MAX_SIZE is the maximal size of block to move, if it can not be represented
1558 in unsigned HOST_WIDE_INT, than it is mask of all ones.
1560 Return the address of the new block, if memcpy is called and returns it,
1561 0 otherwise. */
1564 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1565 unsigned int expected_align, HOST_WIDE_INT expected_size,
1566 unsigned HOST_WIDE_INT min_size,
1567 unsigned HOST_WIDE_INT max_size,
1568 unsigned HOST_WIDE_INT probable_max_size)
1570 bool may_use_call;
1571 rtx retval = 0;
1572 unsigned int align;
1574 gcc_assert (size);
1575 if (CONST_INT_P (size) && INTVAL (size) == 0)
1576 return 0;
1578 switch (method)
1580 case BLOCK_OP_NORMAL:
1581 case BLOCK_OP_TAILCALL:
1582 may_use_call = true;
1583 break;
1585 case BLOCK_OP_CALL_PARM:
1586 may_use_call = block_move_libcall_safe_for_call_parm ();
1588 /* Make inhibit_defer_pop nonzero around the library call
1589 to force it to pop the arguments right away. */
1590 NO_DEFER_POP;
1591 break;
1593 case BLOCK_OP_NO_LIBCALL:
1594 may_use_call = false;
1595 break;
1597 default:
1598 gcc_unreachable ();
1601 gcc_assert (MEM_P (x) && MEM_P (y));
1602 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1603 gcc_assert (align >= BITS_PER_UNIT);
1605 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1606 block copy is more efficient for other large modes, e.g. DCmode. */
1607 x = adjust_address (x, BLKmode, 0);
1608 y = adjust_address (y, BLKmode, 0);
1610 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1611 can be incorrect is coming from __builtin_memcpy. */
1612 if (CONST_INT_P (size))
1614 x = shallow_copy_rtx (x);
1615 y = shallow_copy_rtx (y);
1616 set_mem_size (x, INTVAL (size));
1617 set_mem_size (y, INTVAL (size));
1620 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1621 move_by_pieces (x, y, INTVAL (size), align, 0);
1622 else if (emit_block_move_via_movmem (x, y, size, align,
1623 expected_align, expected_size,
1624 min_size, max_size, probable_max_size))
1626 else if (may_use_call
1627 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1628 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1630 /* Since x and y are passed to a libcall, mark the corresponding
1631 tree EXPR as addressable. */
1632 tree y_expr = MEM_EXPR (y);
1633 tree x_expr = MEM_EXPR (x);
1634 if (y_expr)
1635 mark_addressable (y_expr);
1636 if (x_expr)
1637 mark_addressable (x_expr);
1638 retval = emit_block_copy_via_libcall (x, y, size,
1639 method == BLOCK_OP_TAILCALL);
1642 else
1643 emit_block_move_via_loop (x, y, size, align);
1645 if (method == BLOCK_OP_CALL_PARM)
1646 OK_DEFER_POP;
1648 return retval;
1652 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1654 unsigned HOST_WIDE_INT max, min = 0;
1655 if (GET_CODE (size) == CONST_INT)
1656 min = max = UINTVAL (size);
1657 else
1658 max = GET_MODE_MASK (GET_MODE (size));
1659 return emit_block_move_hints (x, y, size, method, 0, -1,
1660 min, max, max);
1663 /* A subroutine of emit_block_move. Returns true if calling the
1664 block move libcall will not clobber any parameters which may have
1665 already been placed on the stack. */
1667 static bool
1668 block_move_libcall_safe_for_call_parm (void)
1670 #if defined (REG_PARM_STACK_SPACE)
1671 tree fn;
1672 #endif
1674 /* If arguments are pushed on the stack, then they're safe. */
1675 if (PUSH_ARGS)
1676 return true;
1678 /* If registers go on the stack anyway, any argument is sure to clobber
1679 an outgoing argument. */
1680 #if defined (REG_PARM_STACK_SPACE)
1681 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1682 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1683 depend on its argument. */
1684 (void) fn;
1685 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1686 && REG_PARM_STACK_SPACE (fn) != 0)
1687 return false;
1688 #endif
1690 /* If any argument goes in memory, then it might clobber an outgoing
1691 argument. */
1693 CUMULATIVE_ARGS args_so_far_v;
1694 cumulative_args_t args_so_far;
1695 tree fn, arg;
1697 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1698 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1699 args_so_far = pack_cumulative_args (&args_so_far_v);
1701 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1702 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1704 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1705 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1706 NULL_TREE, true);
1707 if (!tmp || !REG_P (tmp))
1708 return false;
1709 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1710 return false;
1711 targetm.calls.function_arg_advance (args_so_far, mode,
1712 NULL_TREE, true);
1715 return true;
1718 /* A subroutine of emit_block_move. Expand a movmem pattern;
1719 return true if successful. */
1721 static bool
1722 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1723 unsigned int expected_align, HOST_WIDE_INT expected_size,
1724 unsigned HOST_WIDE_INT min_size,
1725 unsigned HOST_WIDE_INT max_size,
1726 unsigned HOST_WIDE_INT probable_max_size)
1728 int save_volatile_ok = volatile_ok;
1730 if (expected_align < align)
1731 expected_align = align;
1732 if (expected_size != -1)
1734 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1735 expected_size = probable_max_size;
1736 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1737 expected_size = min_size;
1740 /* Since this is a move insn, we don't care about volatility. */
1741 volatile_ok = 1;
1743 /* Try the most limited insn first, because there's no point
1744 including more than one in the machine description unless
1745 the more limited one has some advantage. */
1747 opt_scalar_int_mode mode_iter;
1748 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
1750 scalar_int_mode mode = mode_iter.require ();
1751 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1753 if (code != CODE_FOR_nothing
1754 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1755 here because if SIZE is less than the mode mask, as it is
1756 returned by the macro, it will definitely be less than the
1757 actual mode mask. Since SIZE is within the Pmode address
1758 space, we limit MODE to Pmode. */
1759 && ((CONST_INT_P (size)
1760 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1761 <= (GET_MODE_MASK (mode) >> 1)))
1762 || max_size <= (GET_MODE_MASK (mode) >> 1)
1763 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1765 struct expand_operand ops[9];
1766 unsigned int nops;
1768 /* ??? When called via emit_block_move_for_call, it'd be
1769 nice if there were some way to inform the backend, so
1770 that it doesn't fail the expansion because it thinks
1771 emitting the libcall would be more efficient. */
1772 nops = insn_data[(int) code].n_generator_args;
1773 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1775 create_fixed_operand (&ops[0], x);
1776 create_fixed_operand (&ops[1], y);
1777 /* The check above guarantees that this size conversion is valid. */
1778 create_convert_operand_to (&ops[2], size, mode, true);
1779 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1780 if (nops >= 6)
1782 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1783 create_integer_operand (&ops[5], expected_size);
1785 if (nops >= 8)
1787 create_integer_operand (&ops[6], min_size);
1788 /* If we can not represent the maximal size,
1789 make parameter NULL. */
1790 if ((HOST_WIDE_INT) max_size != -1)
1791 create_integer_operand (&ops[7], max_size);
1792 else
1793 create_fixed_operand (&ops[7], NULL);
1795 if (nops == 9)
1797 /* If we can not represent the maximal size,
1798 make parameter NULL. */
1799 if ((HOST_WIDE_INT) probable_max_size != -1)
1800 create_integer_operand (&ops[8], probable_max_size);
1801 else
1802 create_fixed_operand (&ops[8], NULL);
1804 if (maybe_expand_insn (code, nops, ops))
1806 volatile_ok = save_volatile_ok;
1807 return true;
1812 volatile_ok = save_volatile_ok;
1813 return false;
1816 /* A subroutine of emit_block_move. Copy the data via an explicit
1817 loop. This is used only when libcalls are forbidden. */
1818 /* ??? It'd be nice to copy in hunks larger than QImode. */
1820 static void
1821 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1822 unsigned int align ATTRIBUTE_UNUSED)
1824 rtx_code_label *cmp_label, *top_label;
1825 rtx iter, x_addr, y_addr, tmp;
1826 machine_mode x_addr_mode = get_address_mode (x);
1827 machine_mode y_addr_mode = get_address_mode (y);
1828 machine_mode iter_mode;
1830 iter_mode = GET_MODE (size);
1831 if (iter_mode == VOIDmode)
1832 iter_mode = word_mode;
1834 top_label = gen_label_rtx ();
1835 cmp_label = gen_label_rtx ();
1836 iter = gen_reg_rtx (iter_mode);
1838 emit_move_insn (iter, const0_rtx);
1840 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1841 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1842 do_pending_stack_adjust ();
1844 emit_jump (cmp_label);
1845 emit_label (top_label);
1847 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1848 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1850 if (x_addr_mode != y_addr_mode)
1851 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1852 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1854 x = change_address (x, QImode, x_addr);
1855 y = change_address (y, QImode, y_addr);
1857 emit_move_insn (x, y);
1859 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1860 true, OPTAB_LIB_WIDEN);
1861 if (tmp != iter)
1862 emit_move_insn (iter, tmp);
1864 emit_label (cmp_label);
1866 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1867 true, top_label,
1868 profile_probability::guessed_always ()
1869 .apply_scale (9, 10));
1872 /* Expand a call to memcpy or memmove or memcmp, and return the result.
1873 TAILCALL is true if this is a tail call. */
1876 emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
1877 rtx size, bool tailcall)
1879 rtx dst_addr, src_addr;
1880 tree call_expr, dst_tree, src_tree, size_tree;
1881 machine_mode size_mode;
1883 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1884 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1885 dst_tree = make_tree (ptr_type_node, dst_addr);
1887 src_addr = copy_addr_to_reg (XEXP (src, 0));
1888 src_addr = convert_memory_address (ptr_mode, src_addr);
1889 src_tree = make_tree (ptr_type_node, src_addr);
1891 size_mode = TYPE_MODE (sizetype);
1892 size = convert_to_mode (size_mode, size, 1);
1893 size = copy_to_mode_reg (size_mode, size);
1894 size_tree = make_tree (sizetype, size);
1896 /* It is incorrect to use the libcall calling conventions for calls to
1897 memcpy/memmove/memcmp because they can be provided by the user. */
1898 tree fn = builtin_decl_implicit (fncode);
1899 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1900 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1902 return expand_call (call_expr, NULL_RTX, false);
1905 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
1906 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
1907 otherwise return null. */
1910 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
1911 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
1912 HOST_WIDE_INT align)
1914 machine_mode insn_mode = insn_data[icode].operand[0].mode;
1916 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
1917 target = NULL_RTX;
1919 struct expand_operand ops[5];
1920 create_output_operand (&ops[0], target, insn_mode);
1921 create_fixed_operand (&ops[1], arg1_rtx);
1922 create_fixed_operand (&ops[2], arg2_rtx);
1923 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
1924 TYPE_UNSIGNED (arg3_type));
1925 create_integer_operand (&ops[4], align);
1926 if (maybe_expand_insn (icode, 5, ops))
1927 return ops[0].value;
1928 return NULL_RTX;
1931 /* Expand a block compare between X and Y with length LEN using the
1932 cmpmem optab, placing the result in TARGET. LEN_TYPE is the type
1933 of the expression that was used to calculate the length. ALIGN
1934 gives the known minimum common alignment. */
1936 static rtx
1937 emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
1938 unsigned align)
1940 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
1941 implementing memcmp because it will stop if it encounters two
1942 zero bytes. */
1943 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
1945 if (icode == CODE_FOR_nothing)
1946 return NULL_RTX;
1948 return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
1951 /* Emit code to compare a block Y to a block X. This may be done with
1952 string-compare instructions, with multiple scalar instructions,
1953 or with a library call.
1955 Both X and Y must be MEM rtx's. LEN is an rtx that says how long
1956 they are. LEN_TYPE is the type of the expression that was used to
1957 calculate it.
1959 If EQUALITY_ONLY is true, it means we don't have to return the tri-state
1960 value of a normal memcmp call, instead we can just compare for equality.
1961 If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
1962 returning NULL_RTX.
1964 Optionally, the caller can pass a constfn and associated data in Y_CFN
1965 and Y_CFN_DATA. describing that the second operand being compared is a
1966 known constant and how to obtain its data.
1967 Return the result of the comparison, or NULL_RTX if we failed to
1968 perform the operation. */
1971 emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
1972 bool equality_only, by_pieces_constfn y_cfn,
1973 void *y_cfndata)
1975 rtx result = 0;
1977 if (CONST_INT_P (len) && INTVAL (len) == 0)
1978 return const0_rtx;
1980 gcc_assert (MEM_P (x) && MEM_P (y));
1981 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1982 gcc_assert (align >= BITS_PER_UNIT);
1984 x = adjust_address (x, BLKmode, 0);
1985 y = adjust_address (y, BLKmode, 0);
1987 if (equality_only
1988 && CONST_INT_P (len)
1989 && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
1990 result = compare_by_pieces (x, y, INTVAL (len), target, align,
1991 y_cfn, y_cfndata);
1992 else
1993 result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);
1995 return result;
1998 /* Copy all or part of a value X into registers starting at REGNO.
1999 The number of registers to be filled is NREGS. */
2001 void
2002 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
2004 if (nregs == 0)
2005 return;
2007 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
2008 x = validize_mem (force_const_mem (mode, x));
2010 /* See if the machine can do this with a load multiple insn. */
2011 if (targetm.have_load_multiple ())
2013 rtx_insn *last = get_last_insn ();
2014 rtx first = gen_rtx_REG (word_mode, regno);
2015 if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
2016 GEN_INT (nregs)))
2018 emit_insn (pat);
2019 return;
2021 else
2022 delete_insns_since (last);
2025 for (int i = 0; i < nregs; i++)
2026 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2027 operand_subword_force (x, i, mode));
2030 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2031 The number of registers to be filled is NREGS. */
2033 void
2034 move_block_from_reg (int regno, rtx x, int nregs)
2036 if (nregs == 0)
2037 return;
2039 /* See if the machine can do this with a store multiple insn. */
2040 if (targetm.have_store_multiple ())
2042 rtx_insn *last = get_last_insn ();
2043 rtx first = gen_rtx_REG (word_mode, regno);
2044 if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
2045 GEN_INT (nregs)))
2047 emit_insn (pat);
2048 return;
2050 else
2051 delete_insns_since (last);
2054 for (int i = 0; i < nregs; i++)
2056 rtx tem = operand_subword (x, i, 1, BLKmode);
2058 gcc_assert (tem);
2060 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2064 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2065 ORIG, where ORIG is a non-consecutive group of registers represented by
2066 a PARALLEL. The clone is identical to the original except in that the
2067 original set of registers is replaced by a new set of pseudo registers.
2068 The new set has the same modes as the original set. */
2071 gen_group_rtx (rtx orig)
2073 int i, length;
2074 rtx *tmps;
2076 gcc_assert (GET_CODE (orig) == PARALLEL);
2078 length = XVECLEN (orig, 0);
2079 tmps = XALLOCAVEC (rtx, length);
2081 /* Skip a NULL entry in first slot. */
2082 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2084 if (i)
2085 tmps[0] = 0;
2087 for (; i < length; i++)
2089 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2090 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2092 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2095 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2098 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
2099 except that values are placed in TMPS[i], and must later be moved
2100 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
2102 static void
2103 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
2105 rtx src;
2106 int start, i;
2107 machine_mode m = GET_MODE (orig_src);
2109 gcc_assert (GET_CODE (dst) == PARALLEL);
2111 if (m != VOIDmode
2112 && !SCALAR_INT_MODE_P (m)
2113 && !MEM_P (orig_src)
2114 && GET_CODE (orig_src) != CONCAT)
2116 scalar_int_mode imode;
2117 if (int_mode_for_mode (GET_MODE (orig_src)).exists (&imode))
2119 src = gen_reg_rtx (imode);
2120 emit_move_insn (gen_lowpart (GET_MODE (orig_src), src), orig_src);
2122 else
2124 src = assign_stack_temp (GET_MODE (orig_src), ssize);
2125 emit_move_insn (src, orig_src);
2127 emit_group_load_1 (tmps, dst, src, type, ssize);
2128 return;
2131 /* Check for a NULL entry, used to indicate that the parameter goes
2132 both on the stack and in registers. */
2133 if (XEXP (XVECEXP (dst, 0, 0), 0))
2134 start = 0;
2135 else
2136 start = 1;
2138 /* Process the pieces. */
2139 for (i = start; i < XVECLEN (dst, 0); i++)
2141 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2142 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2143 unsigned int bytelen = GET_MODE_SIZE (mode);
2144 int shift = 0;
2146 /* Handle trailing fragments that run over the size of the struct. */
2147 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2149 /* Arrange to shift the fragment to where it belongs.
2150 extract_bit_field loads to the lsb of the reg. */
2151 if (
2152 #ifdef BLOCK_REG_PADDING
2153 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2154 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
2155 #else
2156 BYTES_BIG_ENDIAN
2157 #endif
2159 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2160 bytelen = ssize - bytepos;
2161 gcc_assert (bytelen > 0);
2164 /* If we won't be loading directly from memory, protect the real source
2165 from strange tricks we might play; but make sure that the source can
2166 be loaded directly into the destination. */
2167 src = orig_src;
2168 if (!MEM_P (orig_src)
2169 && (!CONSTANT_P (orig_src)
2170 || (GET_MODE (orig_src) != mode
2171 && GET_MODE (orig_src) != VOIDmode)))
2173 if (GET_MODE (orig_src) == VOIDmode)
2174 src = gen_reg_rtx (mode);
2175 else
2176 src = gen_reg_rtx (GET_MODE (orig_src));
2178 emit_move_insn (src, orig_src);
2181 /* Optimize the access just a bit. */
2182 if (MEM_P (src)
2183 && (! targetm.slow_unaligned_access (mode, MEM_ALIGN (src))
2184 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2185 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2186 && bytelen == GET_MODE_SIZE (mode))
2188 tmps[i] = gen_reg_rtx (mode);
2189 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2191 else if (COMPLEX_MODE_P (mode)
2192 && GET_MODE (src) == mode
2193 && bytelen == GET_MODE_SIZE (mode))
2194 /* Let emit_move_complex do the bulk of the work. */
2195 tmps[i] = src;
2196 else if (GET_CODE (src) == CONCAT)
2198 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2199 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2200 unsigned int elt = bytepos / slen0;
2201 unsigned int subpos = bytepos % slen0;
2203 if (subpos + bytelen <= slen0)
2205 /* The following assumes that the concatenated objects all
2206 have the same size. In this case, a simple calculation
2207 can be used to determine the object and the bit field
2208 to be extracted. */
2209 tmps[i] = XEXP (src, elt);
2210 if (subpos != 0
2211 || subpos + bytelen != slen0
2212 || (!CONSTANT_P (tmps[i])
2213 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
2214 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2215 subpos * BITS_PER_UNIT,
2216 1, NULL_RTX, mode, mode, false,
2217 NULL);
2219 else
2221 rtx mem;
2223 gcc_assert (!bytepos);
2224 mem = assign_stack_temp (GET_MODE (src), slen);
2225 emit_move_insn (mem, src);
2226 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
2227 0, 1, NULL_RTX, mode, mode, false,
2228 NULL);
2231 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2232 SIMD register, which is currently broken. While we get GCC
2233 to emit proper RTL for these cases, let's dump to memory. */
2234 else if (VECTOR_MODE_P (GET_MODE (dst))
2235 && REG_P (src))
2237 int slen = GET_MODE_SIZE (GET_MODE (src));
2238 rtx mem;
2240 mem = assign_stack_temp (GET_MODE (src), slen);
2241 emit_move_insn (mem, src);
2242 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2244 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
2245 && XVECLEN (dst, 0) > 1)
2246 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
2247 else if (CONSTANT_P (src))
2249 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
2251 if (len == ssize)
2252 tmps[i] = src;
2253 else
2255 rtx first, second;
2257 /* TODO: const_wide_int can have sizes other than this... */
2258 gcc_assert (2 * len == ssize);
2259 split_double (src, &first, &second);
2260 if (i)
2261 tmps[i] = second;
2262 else
2263 tmps[i] = first;
2266 else if (REG_P (src) && GET_MODE (src) == mode)
2267 tmps[i] = src;
2268 else
2269 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2270 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2271 mode, mode, false, NULL);
2273 if (shift)
2274 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
2275 shift, tmps[i], 0);
2279 /* Emit code to move a block SRC of type TYPE to a block DST,
2280 where DST is non-consecutive registers represented by a PARALLEL.
2281 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2282 if not known. */
2284 void
2285 emit_group_load (rtx dst, rtx src, tree type, int ssize)
2287 rtx *tmps;
2288 int i;
2290 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
2291 emit_group_load_1 (tmps, dst, src, type, ssize);
2293 /* Copy the extracted pieces into the proper (probable) hard regs. */
2294 for (i = 0; i < XVECLEN (dst, 0); i++)
2296 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
2297 if (d == NULL)
2298 continue;
2299 emit_move_insn (d, tmps[i]);
2303 /* Similar, but load SRC into new pseudos in a format that looks like
2304 PARALLEL. This can later be fed to emit_group_move to get things
2305 in the right place. */
2308 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
2310 rtvec vec;
2311 int i;
2313 vec = rtvec_alloc (XVECLEN (parallel, 0));
2314 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
2316 /* Convert the vector to look just like the original PARALLEL, except
2317 with the computed values. */
2318 for (i = 0; i < XVECLEN (parallel, 0); i++)
2320 rtx e = XVECEXP (parallel, 0, i);
2321 rtx d = XEXP (e, 0);
2323 if (d)
2325 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
2326 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
2328 RTVEC_ELT (vec, i) = e;
2331 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
2334 /* Emit code to move a block SRC to block DST, where SRC and DST are
2335 non-consecutive groups of registers, each represented by a PARALLEL. */
2337 void
2338 emit_group_move (rtx dst, rtx src)
2340 int i;
2342 gcc_assert (GET_CODE (src) == PARALLEL
2343 && GET_CODE (dst) == PARALLEL
2344 && XVECLEN (src, 0) == XVECLEN (dst, 0));
2346 /* Skip first entry if NULL. */
2347 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2348 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2349 XEXP (XVECEXP (src, 0, i), 0));
2352 /* Move a group of registers represented by a PARALLEL into pseudos. */
2355 emit_group_move_into_temps (rtx src)
2357 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
2358 int i;
2360 for (i = 0; i < XVECLEN (src, 0); i++)
2362 rtx e = XVECEXP (src, 0, i);
2363 rtx d = XEXP (e, 0);
2365 if (d)
2366 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
2367 RTVEC_ELT (vec, i) = e;
2370 return gen_rtx_PARALLEL (GET_MODE (src), vec);
2373 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2374 where SRC is non-consecutive registers represented by a PARALLEL.
2375 SSIZE represents the total size of block ORIG_DST, or -1 if not
2376 known. */
2378 void
2379 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2381 rtx *tmps, dst;
2382 int start, finish, i;
2383 machine_mode m = GET_MODE (orig_dst);
2385 gcc_assert (GET_CODE (src) == PARALLEL);
2387 if (!SCALAR_INT_MODE_P (m)
2388 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
2390 scalar_int_mode imode;
2391 if (int_mode_for_mode (GET_MODE (orig_dst)).exists (&imode))
2393 dst = gen_reg_rtx (imode);
2394 emit_group_store (dst, src, type, ssize);
2395 dst = gen_lowpart (GET_MODE (orig_dst), dst);
2397 else
2399 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
2400 emit_group_store (dst, src, type, ssize);
2402 emit_move_insn (orig_dst, dst);
2403 return;
2406 /* Check for a NULL entry, used to indicate that the parameter goes
2407 both on the stack and in registers. */
2408 if (XEXP (XVECEXP (src, 0, 0), 0))
2409 start = 0;
2410 else
2411 start = 1;
2412 finish = XVECLEN (src, 0);
2414 tmps = XALLOCAVEC (rtx, finish);
2416 /* Copy the (probable) hard regs into pseudos. */
2417 for (i = start; i < finish; i++)
2419 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2420 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
2422 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2423 emit_move_insn (tmps[i], reg);
2425 else
2426 tmps[i] = reg;
2429 /* If we won't be storing directly into memory, protect the real destination
2430 from strange tricks we might play. */
2431 dst = orig_dst;
2432 if (GET_CODE (dst) == PARALLEL)
2434 rtx temp;
2436 /* We can get a PARALLEL dst if there is a conditional expression in
2437 a return statement. In that case, the dst and src are the same,
2438 so no action is necessary. */
2439 if (rtx_equal_p (dst, src))
2440 return;
2442 /* It is unclear if we can ever reach here, but we may as well handle
2443 it. Allocate a temporary, and split this into a store/load to/from
2444 the temporary. */
2445 temp = assign_stack_temp (GET_MODE (dst), ssize);
2446 emit_group_store (temp, src, type, ssize);
2447 emit_group_load (dst, temp, type, ssize);
2448 return;
2450 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2452 machine_mode outer = GET_MODE (dst);
2453 machine_mode inner;
2454 HOST_WIDE_INT bytepos;
2455 bool done = false;
2456 rtx temp;
2458 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
2459 dst = gen_reg_rtx (outer);
2461 /* Make life a bit easier for combine. */
2462 /* If the first element of the vector is the low part
2463 of the destination mode, use a paradoxical subreg to
2464 initialize the destination. */
2465 if (start < finish)
2467 inner = GET_MODE (tmps[start]);
2468 bytepos = subreg_lowpart_offset (inner, outer);
2469 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2471 temp = simplify_gen_subreg (outer, tmps[start],
2472 inner, 0);
2473 if (temp)
2475 emit_move_insn (dst, temp);
2476 done = true;
2477 start++;
2482 /* If the first element wasn't the low part, try the last. */
2483 if (!done
2484 && start < finish - 1)
2486 inner = GET_MODE (tmps[finish - 1]);
2487 bytepos = subreg_lowpart_offset (inner, outer);
2488 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2490 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2491 inner, 0);
2492 if (temp)
2494 emit_move_insn (dst, temp);
2495 done = true;
2496 finish--;
2501 /* Otherwise, simply initialize the result to zero. */
2502 if (!done)
2503 emit_move_insn (dst, CONST0_RTX (outer));
2506 /* Process the pieces. */
2507 for (i = start; i < finish; i++)
2509 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2510 machine_mode mode = GET_MODE (tmps[i]);
2511 unsigned int bytelen = GET_MODE_SIZE (mode);
2512 unsigned int adj_bytelen;
2513 rtx dest = dst;
2515 /* Handle trailing fragments that run over the size of the struct. */
2516 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2517 adj_bytelen = ssize - bytepos;
2518 else
2519 adj_bytelen = bytelen;
2521 if (GET_CODE (dst) == CONCAT)
2523 if (bytepos + adj_bytelen
2524 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2525 dest = XEXP (dst, 0);
2526 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2528 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2529 dest = XEXP (dst, 1);
2531 else
2533 machine_mode dest_mode = GET_MODE (dest);
2534 machine_mode tmp_mode = GET_MODE (tmps[i]);
2536 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2538 if (GET_MODE_ALIGNMENT (dest_mode)
2539 >= GET_MODE_ALIGNMENT (tmp_mode))
2541 dest = assign_stack_temp (dest_mode,
2542 GET_MODE_SIZE (dest_mode));
2543 emit_move_insn (adjust_address (dest,
2544 tmp_mode,
2545 bytepos),
2546 tmps[i]);
2547 dst = dest;
2549 else
2551 dest = assign_stack_temp (tmp_mode,
2552 GET_MODE_SIZE (tmp_mode));
2553 emit_move_insn (dest, tmps[i]);
2554 dst = adjust_address (dest, dest_mode, bytepos);
2556 break;
2560 /* Handle trailing fragments that run over the size of the struct. */
2561 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2563 /* store_bit_field always takes its value from the lsb.
2564 Move the fragment to the lsb if it's not already there. */
2565 if (
2566 #ifdef BLOCK_REG_PADDING
2567 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2568 == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
2569 #else
2570 BYTES_BIG_ENDIAN
2571 #endif
2574 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2575 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2576 shift, tmps[i], 0);
2579 /* Make sure not to write past the end of the struct. */
2580 store_bit_field (dest,
2581 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2582 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2583 VOIDmode, tmps[i], false);
2586 /* Optimize the access just a bit. */
2587 else if (MEM_P (dest)
2588 && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (dest))
2589 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2590 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2591 && bytelen == GET_MODE_SIZE (mode))
2592 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2594 else
2595 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2596 0, 0, mode, tmps[i], false);
2599 /* Copy from the pseudo into the (probable) hard reg. */
2600 if (orig_dst != dst)
2601 emit_move_insn (orig_dst, dst);
2604 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2605 of the value stored in X. */
2608 maybe_emit_group_store (rtx x, tree type)
2610 machine_mode mode = TYPE_MODE (type);
2611 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2612 if (GET_CODE (x) == PARALLEL)
2614 rtx result = gen_reg_rtx (mode);
2615 emit_group_store (result, x, type, int_size_in_bytes (type));
2616 return result;
2618 return x;
2621 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2623 This is used on targets that return BLKmode values in registers. */
2625 static void
2626 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2628 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2629 rtx src = NULL, dst = NULL;
2630 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2631 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2632 /* No current ABI uses variable-sized modes to pass a BLKmnode type. */
2633 fixed_size_mode mode = as_a <fixed_size_mode> (GET_MODE (srcreg));
2634 fixed_size_mode tmode = as_a <fixed_size_mode> (GET_MODE (target));
2635 fixed_size_mode copy_mode;
2637 /* BLKmode registers created in the back-end shouldn't have survived. */
2638 gcc_assert (mode != BLKmode);
2640 /* If the structure doesn't take up a whole number of words, see whether
2641 SRCREG is padded on the left or on the right. If it's on the left,
2642 set PADDING_CORRECTION to the number of bits to skip.
2644 In most ABIs, the structure will be returned at the least end of
2645 the register, which translates to right padding on little-endian
2646 targets and left padding on big-endian targets. The opposite
2647 holds if the structure is returned at the most significant
2648 end of the register. */
2649 if (bytes % UNITS_PER_WORD != 0
2650 && (targetm.calls.return_in_msb (type)
2651 ? !BYTES_BIG_ENDIAN
2652 : BYTES_BIG_ENDIAN))
2653 padding_correction
2654 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2656 /* We can use a single move if we have an exact mode for the size. */
2657 else if (MEM_P (target)
2658 && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (target))
2659 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2660 && bytes == GET_MODE_SIZE (mode))
2662 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2663 return;
2666 /* And if we additionally have the same mode for a register. */
2667 else if (REG_P (target)
2668 && GET_MODE (target) == mode
2669 && bytes == GET_MODE_SIZE (mode))
2671 emit_move_insn (target, srcreg);
2672 return;
2675 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2676 into a new pseudo which is a full word. */
2677 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2679 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2680 mode = word_mode;
2683 /* Copy the structure BITSIZE bits at a time. If the target lives in
2684 memory, take care of not reading/writing past its end by selecting
2685 a copy mode suited to BITSIZE. This should always be possible given
2686 how it is computed.
2688 If the target lives in register, make sure not to select a copy mode
2689 larger than the mode of the register.
2691 We could probably emit more efficient code for machines which do not use
2692 strict alignment, but it doesn't seem worth the effort at the current
2693 time. */
2695 copy_mode = word_mode;
2696 if (MEM_P (target))
2698 opt_scalar_int_mode mem_mode = int_mode_for_size (bitsize, 1);
2699 if (mem_mode.exists ())
2700 copy_mode = mem_mode.require ();
2702 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2703 copy_mode = tmode;
2705 for (bitpos = 0, xbitpos = padding_correction;
2706 bitpos < bytes * BITS_PER_UNIT;
2707 bitpos += bitsize, xbitpos += bitsize)
2709 /* We need a new source operand each time xbitpos is on a
2710 word boundary and when xbitpos == padding_correction
2711 (the first time through). */
2712 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2713 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2715 /* We need a new destination operand each time bitpos is on
2716 a word boundary. */
2717 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2718 dst = target;
2719 else if (bitpos % BITS_PER_WORD == 0)
2720 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2722 /* Use xbitpos for the source extraction (right justified) and
2723 bitpos for the destination store (left justified). */
2724 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2725 extract_bit_field (src, bitsize,
2726 xbitpos % BITS_PER_WORD, 1,
2727 NULL_RTX, copy_mode, copy_mode,
2728 false, NULL),
2729 false);
2733 /* Copy BLKmode value SRC into a register of mode MODE_IN. Return the
2734 register if it contains any data, otherwise return null.
2736 This is used on targets that return BLKmode values in registers. */
2739 copy_blkmode_to_reg (machine_mode mode_in, tree src)
2741 int i, n_regs;
2742 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2743 unsigned int bitsize;
2744 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2745 /* No current ABI uses variable-sized modes to pass a BLKmnode type. */
2746 fixed_size_mode mode = as_a <fixed_size_mode> (mode_in);
2747 fixed_size_mode dst_mode;
2749 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2751 x = expand_normal (src);
2753 bytes = arg_int_size_in_bytes (TREE_TYPE (src));
2754 if (bytes == 0)
2755 return NULL_RTX;
2757 /* If the structure doesn't take up a whole number of words, see
2758 whether the register value should be padded on the left or on
2759 the right. Set PADDING_CORRECTION to the number of padding
2760 bits needed on the left side.
2762 In most ABIs, the structure will be returned at the least end of
2763 the register, which translates to right padding on little-endian
2764 targets and left padding on big-endian targets. The opposite
2765 holds if the structure is returned at the most significant
2766 end of the register. */
2767 if (bytes % UNITS_PER_WORD != 0
2768 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2769 ? !BYTES_BIG_ENDIAN
2770 : BYTES_BIG_ENDIAN))
2771 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2772 * BITS_PER_UNIT));
2774 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2775 dst_words = XALLOCAVEC (rtx, n_regs);
2776 bitsize = BITS_PER_WORD;
2777 if (targetm.slow_unaligned_access (word_mode, TYPE_ALIGN (TREE_TYPE (src))))
2778 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2780 /* Copy the structure BITSIZE bits at a time. */
2781 for (bitpos = 0, xbitpos = padding_correction;
2782 bitpos < bytes * BITS_PER_UNIT;
2783 bitpos += bitsize, xbitpos += bitsize)
2785 /* We need a new destination pseudo each time xbitpos is
2786 on a word boundary and when xbitpos == padding_correction
2787 (the first time through). */
2788 if (xbitpos % BITS_PER_WORD == 0
2789 || xbitpos == padding_correction)
2791 /* Generate an appropriate register. */
2792 dst_word = gen_reg_rtx (word_mode);
2793 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2795 /* Clear the destination before we move anything into it. */
2796 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2799 /* We need a new source operand each time bitpos is on a word
2800 boundary. */
2801 if (bitpos % BITS_PER_WORD == 0)
2802 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2804 /* Use bitpos for the source extraction (left justified) and
2805 xbitpos for the destination store (right justified). */
2806 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2807 0, 0, word_mode,
2808 extract_bit_field (src_word, bitsize,
2809 bitpos % BITS_PER_WORD, 1,
2810 NULL_RTX, word_mode, word_mode,
2811 false, NULL),
2812 false);
2815 if (mode == BLKmode)
2817 /* Find the smallest integer mode large enough to hold the
2818 entire structure. */
2819 opt_scalar_int_mode mode_iter;
2820 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
2821 if (GET_MODE_SIZE (mode_iter.require ()) >= bytes)
2822 break;
2824 /* A suitable mode should have been found. */
2825 mode = mode_iter.require ();
2828 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2829 dst_mode = word_mode;
2830 else
2831 dst_mode = mode;
2832 dst = gen_reg_rtx (dst_mode);
2834 for (i = 0; i < n_regs; i++)
2835 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2837 if (mode != dst_mode)
2838 dst = gen_lowpart (mode, dst);
2840 return dst;
2843 /* Add a USE expression for REG to the (possibly empty) list pointed
2844 to by CALL_FUSAGE. REG must denote a hard register. */
2846 void
2847 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2849 gcc_assert (REG_P (reg));
2851 if (!HARD_REGISTER_P (reg))
2852 return;
2854 *call_fusage
2855 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2858 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2859 to by CALL_FUSAGE. REG must denote a hard register. */
2861 void
2862 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2864 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2866 *call_fusage
2867 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2870 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2871 starting at REGNO. All of these registers must be hard registers. */
2873 void
2874 use_regs (rtx *call_fusage, int regno, int nregs)
2876 int i;
2878 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2880 for (i = 0; i < nregs; i++)
2881 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2884 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2885 PARALLEL REGS. This is for calls that pass values in multiple
2886 non-contiguous locations. The Irix 6 ABI has examples of this. */
2888 void
2889 use_group_regs (rtx *call_fusage, rtx regs)
2891 int i;
2893 for (i = 0; i < XVECLEN (regs, 0); i++)
2895 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2897 /* A NULL entry means the parameter goes both on the stack and in
2898 registers. This can also be a MEM for targets that pass values
2899 partially on the stack and partially in registers. */
2900 if (reg != 0 && REG_P (reg))
2901 use_reg (call_fusage, reg);
2905 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2906 assigment and the code of the expresion on the RHS is CODE. Return
2907 NULL otherwise. */
2909 static gimple *
2910 get_def_for_expr (tree name, enum tree_code code)
2912 gimple *def_stmt;
2914 if (TREE_CODE (name) != SSA_NAME)
2915 return NULL;
2917 def_stmt = get_gimple_for_ssa_name (name);
2918 if (!def_stmt
2919 || gimple_assign_rhs_code (def_stmt) != code)
2920 return NULL;
2922 return def_stmt;
2925 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2926 assigment and the class of the expresion on the RHS is CLASS. Return
2927 NULL otherwise. */
2929 static gimple *
2930 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2932 gimple *def_stmt;
2934 if (TREE_CODE (name) != SSA_NAME)
2935 return NULL;
2937 def_stmt = get_gimple_for_ssa_name (name);
2938 if (!def_stmt
2939 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2940 return NULL;
2942 return def_stmt;
2945 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2946 its length in bytes. */
2949 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2950 unsigned int expected_align, HOST_WIDE_INT expected_size,
2951 unsigned HOST_WIDE_INT min_size,
2952 unsigned HOST_WIDE_INT max_size,
2953 unsigned HOST_WIDE_INT probable_max_size)
2955 machine_mode mode = GET_MODE (object);
2956 unsigned int align;
2958 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2960 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2961 just move a zero. Otherwise, do this a piece at a time. */
2962 if (mode != BLKmode
2963 && CONST_INT_P (size)
2964 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2966 rtx zero = CONST0_RTX (mode);
2967 if (zero != NULL)
2969 emit_move_insn (object, zero);
2970 return NULL;
2973 if (COMPLEX_MODE_P (mode))
2975 zero = CONST0_RTX (GET_MODE_INNER (mode));
2976 if (zero != NULL)
2978 write_complex_part (object, zero, 0);
2979 write_complex_part (object, zero, 1);
2980 return NULL;
2985 if (size == const0_rtx)
2986 return NULL;
2988 align = MEM_ALIGN (object);
2990 if (CONST_INT_P (size)
2991 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2992 CLEAR_BY_PIECES,
2993 optimize_insn_for_speed_p ()))
2994 clear_by_pieces (object, INTVAL (size), align);
2995 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2996 expected_align, expected_size,
2997 min_size, max_size, probable_max_size))
2999 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
3000 return set_storage_via_libcall (object, size, const0_rtx,
3001 method == BLOCK_OP_TAILCALL);
3002 else
3003 gcc_unreachable ();
3005 return NULL;
3009 clear_storage (rtx object, rtx size, enum block_op_methods method)
3011 unsigned HOST_WIDE_INT max, min = 0;
3012 if (GET_CODE (size) == CONST_INT)
3013 min = max = UINTVAL (size);
3014 else
3015 max = GET_MODE_MASK (GET_MODE (size));
3016 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
3020 /* A subroutine of clear_storage. Expand a call to memset.
3021 Return the return value of memset, 0 otherwise. */
3024 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
3026 tree call_expr, fn, object_tree, size_tree, val_tree;
3027 machine_mode size_mode;
3029 object = copy_addr_to_reg (XEXP (object, 0));
3030 object_tree = make_tree (ptr_type_node, object);
3032 if (!CONST_INT_P (val))
3033 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
3034 val_tree = make_tree (integer_type_node, val);
3036 size_mode = TYPE_MODE (sizetype);
3037 size = convert_to_mode (size_mode, size, 1);
3038 size = copy_to_mode_reg (size_mode, size);
3039 size_tree = make_tree (sizetype, size);
3041 /* It is incorrect to use the libcall calling conventions for calls to
3042 memset because it can be provided by the user. */
3043 fn = builtin_decl_implicit (BUILT_IN_MEMSET);
3044 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
3045 CALL_EXPR_TAILCALL (call_expr) = tailcall;
3047 return expand_call (call_expr, NULL_RTX, false);
3050 /* Expand a setmem pattern; return true if successful. */
3052 bool
3053 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
3054 unsigned int expected_align, HOST_WIDE_INT expected_size,
3055 unsigned HOST_WIDE_INT min_size,
3056 unsigned HOST_WIDE_INT max_size,
3057 unsigned HOST_WIDE_INT probable_max_size)
3059 /* Try the most limited insn first, because there's no point
3060 including more than one in the machine description unless
3061 the more limited one has some advantage. */
3063 if (expected_align < align)
3064 expected_align = align;
3065 if (expected_size != -1)
3067 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
3068 expected_size = max_size;
3069 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
3070 expected_size = min_size;
3073 opt_scalar_int_mode mode_iter;
3074 FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
3076 scalar_int_mode mode = mode_iter.require ();
3077 enum insn_code code = direct_optab_handler (setmem_optab, mode);
3079 if (code != CODE_FOR_nothing
3080 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
3081 here because if SIZE is less than the mode mask, as it is
3082 returned by the macro, it will definitely be less than the
3083 actual mode mask. Since SIZE is within the Pmode address
3084 space, we limit MODE to Pmode. */
3085 && ((CONST_INT_P (size)
3086 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3087 <= (GET_MODE_MASK (mode) >> 1)))
3088 || max_size <= (GET_MODE_MASK (mode) >> 1)
3089 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
3091 struct expand_operand ops[9];
3092 unsigned int nops;
3094 nops = insn_data[(int) code].n_generator_args;
3095 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
3097 create_fixed_operand (&ops[0], object);
3098 /* The check above guarantees that this size conversion is valid. */
3099 create_convert_operand_to (&ops[1], size, mode, true);
3100 create_convert_operand_from (&ops[2], val, byte_mode, true);
3101 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
3102 if (nops >= 6)
3104 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
3105 create_integer_operand (&ops[5], expected_size);
3107 if (nops >= 8)
3109 create_integer_operand (&ops[6], min_size);
3110 /* If we can not represent the maximal size,
3111 make parameter NULL. */
3112 if ((HOST_WIDE_INT) max_size != -1)
3113 create_integer_operand (&ops[7], max_size);
3114 else
3115 create_fixed_operand (&ops[7], NULL);
3117 if (nops == 9)
3119 /* If we can not represent the maximal size,
3120 make parameter NULL. */
3121 if ((HOST_WIDE_INT) probable_max_size != -1)
3122 create_integer_operand (&ops[8], probable_max_size);
3123 else
3124 create_fixed_operand (&ops[8], NULL);
3126 if (maybe_expand_insn (code, nops, ops))
3127 return true;
3131 return false;
3135 /* Write to one of the components of the complex value CPLX. Write VAL to
3136 the real part if IMAG_P is false, and the imaginary part if its true. */
3138 void
3139 write_complex_part (rtx cplx, rtx val, bool imag_p)
3141 machine_mode cmode;
3142 scalar_mode imode;
3143 unsigned ibitsize;
3145 if (GET_CODE (cplx) == CONCAT)
3147 emit_move_insn (XEXP (cplx, imag_p), val);
3148 return;
3151 cmode = GET_MODE (cplx);
3152 imode = GET_MODE_INNER (cmode);
3153 ibitsize = GET_MODE_BITSIZE (imode);
3155 /* For MEMs simplify_gen_subreg may generate an invalid new address
3156 because, e.g., the original address is considered mode-dependent
3157 by the target, which restricts simplify_subreg from invoking
3158 adjust_address_nv. Instead of preparing fallback support for an
3159 invalid address, we call adjust_address_nv directly. */
3160 if (MEM_P (cplx))
3162 emit_move_insn (adjust_address_nv (cplx, imode,
3163 imag_p ? GET_MODE_SIZE (imode) : 0),
3164 val);
3165 return;
3168 /* If the sub-object is at least word sized, then we know that subregging
3169 will work. This special case is important, since store_bit_field
3170 wants to operate on integer modes, and there's rarely an OImode to
3171 correspond to TCmode. */
3172 if (ibitsize >= BITS_PER_WORD
3173 /* For hard regs we have exact predicates. Assume we can split
3174 the original object if it spans an even number of hard regs.
3175 This special case is important for SCmode on 64-bit platforms
3176 where the natural size of floating-point regs is 32-bit. */
3177 || (REG_P (cplx)
3178 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3179 && REG_NREGS (cplx) % 2 == 0))
3181 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3182 imag_p ? GET_MODE_SIZE (imode) : 0);
3183 if (part)
3185 emit_move_insn (part, val);
3186 return;
3188 else
3189 /* simplify_gen_subreg may fail for sub-word MEMs. */
3190 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3193 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
3194 false);
3197 /* Extract one of the components of the complex value CPLX. Extract the
3198 real part if IMAG_P is false, and the imaginary part if it's true. */
3201 read_complex_part (rtx cplx, bool imag_p)
3203 machine_mode cmode;
3204 scalar_mode imode;
3205 unsigned ibitsize;
3207 if (GET_CODE (cplx) == CONCAT)
3208 return XEXP (cplx, imag_p);
3210 cmode = GET_MODE (cplx);
3211 imode = GET_MODE_INNER (cmode);
3212 ibitsize = GET_MODE_BITSIZE (imode);
3214 /* Special case reads from complex constants that got spilled to memory. */
3215 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3217 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3218 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3220 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3221 if (CONSTANT_CLASS_P (part))
3222 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3226 /* For MEMs simplify_gen_subreg may generate an invalid new address
3227 because, e.g., the original address is considered mode-dependent
3228 by the target, which restricts simplify_subreg from invoking
3229 adjust_address_nv. Instead of preparing fallback support for an
3230 invalid address, we call adjust_address_nv directly. */
3231 if (MEM_P (cplx))
3232 return adjust_address_nv (cplx, imode,
3233 imag_p ? GET_MODE_SIZE (imode) : 0);
3235 /* If the sub-object is at least word sized, then we know that subregging
3236 will work. This special case is important, since extract_bit_field
3237 wants to operate on integer modes, and there's rarely an OImode to
3238 correspond to TCmode. */
3239 if (ibitsize >= BITS_PER_WORD
3240 /* For hard regs we have exact predicates. Assume we can split
3241 the original object if it spans an even number of hard regs.
3242 This special case is important for SCmode on 64-bit platforms
3243 where the natural size of floating-point regs is 32-bit. */
3244 || (REG_P (cplx)
3245 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3246 && REG_NREGS (cplx) % 2 == 0))
3248 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3249 imag_p ? GET_MODE_SIZE (imode) : 0);
3250 if (ret)
3251 return ret;
3252 else
3253 /* simplify_gen_subreg may fail for sub-word MEMs. */
3254 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3257 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3258 true, NULL_RTX, imode, imode, false, NULL);
3261 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3262 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3263 represented in NEW_MODE. If FORCE is true, this will never happen, as
3264 we'll force-create a SUBREG if needed. */
3266 static rtx
3267 emit_move_change_mode (machine_mode new_mode,
3268 machine_mode old_mode, rtx x, bool force)
3270 rtx ret;
3272 if (push_operand (x, GET_MODE (x)))
3274 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3275 MEM_COPY_ATTRIBUTES (ret, x);
3277 else if (MEM_P (x))
3279 /* We don't have to worry about changing the address since the
3280 size in bytes is supposed to be the same. */
3281 if (reload_in_progress)
3283 /* Copy the MEM to change the mode and move any
3284 substitutions from the old MEM to the new one. */
3285 ret = adjust_address_nv (x, new_mode, 0);
3286 copy_replacements (x, ret);
3288 else
3289 ret = adjust_address (x, new_mode, 0);
3291 else
3293 /* Note that we do want simplify_subreg's behavior of validating
3294 that the new mode is ok for a hard register. If we were to use
3295 simplify_gen_subreg, we would create the subreg, but would
3296 probably run into the target not being able to implement it. */
3297 /* Except, of course, when FORCE is true, when this is exactly what
3298 we want. Which is needed for CCmodes on some targets. */
3299 if (force)
3300 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3301 else
3302 ret = simplify_subreg (new_mode, x, old_mode, 0);
3305 return ret;
3308 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3309 an integer mode of the same size as MODE. Returns the instruction
3310 emitted, or NULL if such a move could not be generated. */
3312 static rtx_insn *
3313 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3315 scalar_int_mode imode;
3316 enum insn_code code;
3318 /* There must exist a mode of the exact size we require. */
3319 if (!int_mode_for_mode (mode).exists (&imode))
3320 return NULL;
3322 /* The target must support moves in this mode. */
3323 code = optab_handler (mov_optab, imode);
3324 if (code == CODE_FOR_nothing)
3325 return NULL;
3327 x = emit_move_change_mode (imode, mode, x, force);
3328 if (x == NULL_RTX)
3329 return NULL;
3330 y = emit_move_change_mode (imode, mode, y, force);
3331 if (y == NULL_RTX)
3332 return NULL;
3333 return emit_insn (GEN_FCN (code) (x, y));
3336 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3337 Return an equivalent MEM that does not use an auto-increment. */
3340 emit_move_resolve_push (machine_mode mode, rtx x)
3342 enum rtx_code code = GET_CODE (XEXP (x, 0));
3343 HOST_WIDE_INT adjust;
3344 rtx temp;
3346 adjust = GET_MODE_SIZE (mode);
3347 #ifdef PUSH_ROUNDING
3348 adjust = PUSH_ROUNDING (adjust);
3349 #endif
3350 if (code == PRE_DEC || code == POST_DEC)
3351 adjust = -adjust;
3352 else if (code == PRE_MODIFY || code == POST_MODIFY)
3354 rtx expr = XEXP (XEXP (x, 0), 1);
3355 HOST_WIDE_INT val;
3357 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3358 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3359 val = INTVAL (XEXP (expr, 1));
3360 if (GET_CODE (expr) == MINUS)
3361 val = -val;
3362 gcc_assert (adjust == val || adjust == -val);
3363 adjust = val;
3366 /* Do not use anti_adjust_stack, since we don't want to update
3367 stack_pointer_delta. */
3368 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3369 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3370 0, OPTAB_LIB_WIDEN);
3371 if (temp != stack_pointer_rtx)
3372 emit_move_insn (stack_pointer_rtx, temp);
3374 switch (code)
3376 case PRE_INC:
3377 case PRE_DEC:
3378 case PRE_MODIFY:
3379 temp = stack_pointer_rtx;
3380 break;
3381 case POST_INC:
3382 case POST_DEC:
3383 case POST_MODIFY:
3384 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3385 break;
3386 default:
3387 gcc_unreachable ();
3390 return replace_equiv_address (x, temp);
3393 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3394 X is known to satisfy push_operand, and MODE is known to be complex.
3395 Returns the last instruction emitted. */
3397 rtx_insn *
3398 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3400 scalar_mode submode = GET_MODE_INNER (mode);
3401 bool imag_first;
3403 #ifdef PUSH_ROUNDING
3404 unsigned int submodesize = GET_MODE_SIZE (submode);
3406 /* In case we output to the stack, but the size is smaller than the
3407 machine can push exactly, we need to use move instructions. */
3408 if (PUSH_ROUNDING (submodesize) != submodesize)
3410 x = emit_move_resolve_push (mode, x);
3411 return emit_move_insn (x, y);
3413 #endif
3415 /* Note that the real part always precedes the imag part in memory
3416 regardless of machine's endianness. */
3417 switch (GET_CODE (XEXP (x, 0)))
3419 case PRE_DEC:
3420 case POST_DEC:
3421 imag_first = true;
3422 break;
3423 case PRE_INC:
3424 case POST_INC:
3425 imag_first = false;
3426 break;
3427 default:
3428 gcc_unreachable ();
3431 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3432 read_complex_part (y, imag_first));
3433 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3434 read_complex_part (y, !imag_first));
3437 /* A subroutine of emit_move_complex. Perform the move from Y to X
3438 via two moves of the parts. Returns the last instruction emitted. */
3440 rtx_insn *
3441 emit_move_complex_parts (rtx x, rtx y)
3443 /* Show the output dies here. This is necessary for SUBREGs
3444 of pseudos since we cannot track their lifetimes correctly;
3445 hard regs shouldn't appear here except as return values. */
3446 if (!reload_completed && !reload_in_progress
3447 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3448 emit_clobber (x);
3450 write_complex_part (x, read_complex_part (y, false), false);
3451 write_complex_part (x, read_complex_part (y, true), true);
3453 return get_last_insn ();
3456 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3457 MODE is known to be complex. Returns the last instruction emitted. */
3459 static rtx_insn *
3460 emit_move_complex (machine_mode mode, rtx x, rtx y)
3462 bool try_int;
3464 /* Need to take special care for pushes, to maintain proper ordering
3465 of the data, and possibly extra padding. */
3466 if (push_operand (x, mode))
3467 return emit_move_complex_push (mode, x, y);
3469 /* See if we can coerce the target into moving both values at once, except
3470 for floating point where we favor moving as parts if this is easy. */
3471 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3472 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3473 && !(REG_P (x)
3474 && HARD_REGISTER_P (x)
3475 && REG_NREGS (x) == 1)
3476 && !(REG_P (y)
3477 && HARD_REGISTER_P (y)
3478 && REG_NREGS (y) == 1))
3479 try_int = false;
3480 /* Not possible if the values are inherently not adjacent. */
3481 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3482 try_int = false;
3483 /* Is possible if both are registers (or subregs of registers). */
3484 else if (register_operand (x, mode) && register_operand (y, mode))
3485 try_int = true;
3486 /* If one of the operands is a memory, and alignment constraints
3487 are friendly enough, we may be able to do combined memory operations.
3488 We do not attempt this if Y is a constant because that combination is
3489 usually better with the by-parts thing below. */
3490 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3491 && (!STRICT_ALIGNMENT
3492 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3493 try_int = true;
3494 else
3495 try_int = false;
3497 if (try_int)
3499 rtx_insn *ret;
3501 /* For memory to memory moves, optimal behavior can be had with the
3502 existing block move logic. */
3503 if (MEM_P (x) && MEM_P (y))
3505 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3506 BLOCK_OP_NO_LIBCALL);
3507 return get_last_insn ();
3510 ret = emit_move_via_integer (mode, x, y, true);
3511 if (ret)
3512 return ret;
3515 return emit_move_complex_parts (x, y);
3518 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3519 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3521 static rtx_insn *
3522 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3524 rtx_insn *ret;
3526 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3527 if (mode != CCmode)
3529 enum insn_code code = optab_handler (mov_optab, CCmode);
3530 if (code != CODE_FOR_nothing)
3532 x = emit_move_change_mode (CCmode, mode, x, true);
3533 y = emit_move_change_mode (CCmode, mode, y, true);
3534 return emit_insn (GEN_FCN (code) (x, y));
3538 /* Otherwise, find the MODE_INT mode of the same width. */
3539 ret = emit_move_via_integer (mode, x, y, false);
3540 gcc_assert (ret != NULL);
3541 return ret;
3544 /* Return true if word I of OP lies entirely in the
3545 undefined bits of a paradoxical subreg. */
3547 static bool
3548 undefined_operand_subword_p (const_rtx op, int i)
3550 if (GET_CODE (op) != SUBREG)
3551 return false;
3552 machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
3553 HOST_WIDE_INT offset = i * UNITS_PER_WORD + subreg_memory_offset (op);
3554 return (offset >= GET_MODE_SIZE (innermostmode)
3555 || offset <= -UNITS_PER_WORD);
3558 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3559 MODE is any multi-word or full-word mode that lacks a move_insn
3560 pattern. Note that you will get better code if you define such
3561 patterns, even if they must turn into multiple assembler instructions. */
3563 static rtx_insn *
3564 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3566 rtx_insn *last_insn = 0;
3567 rtx_insn *seq;
3568 rtx inner;
3569 bool need_clobber;
3570 int i;
3572 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3574 /* If X is a push on the stack, do the push now and replace
3575 X with a reference to the stack pointer. */
3576 if (push_operand (x, mode))
3577 x = emit_move_resolve_push (mode, x);
3579 /* If we are in reload, see if either operand is a MEM whose address
3580 is scheduled for replacement. */
3581 if (reload_in_progress && MEM_P (x)
3582 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3583 x = replace_equiv_address_nv (x, inner);
3584 if (reload_in_progress && MEM_P (y)
3585 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3586 y = replace_equiv_address_nv (y, inner);
3588 start_sequence ();
3590 need_clobber = false;
3591 for (i = 0;
3592 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3593 i++)
3595 rtx xpart = operand_subword (x, i, 1, mode);
3596 rtx ypart;
3598 /* Do not generate code for a move if it would come entirely
3599 from the undefined bits of a paradoxical subreg. */
3600 if (undefined_operand_subword_p (y, i))
3601 continue;
3603 ypart = operand_subword (y, i, 1, mode);
3605 /* If we can't get a part of Y, put Y into memory if it is a
3606 constant. Otherwise, force it into a register. Then we must
3607 be able to get a part of Y. */
3608 if (ypart == 0 && CONSTANT_P (y))
3610 y = use_anchored_address (force_const_mem (mode, y));
3611 ypart = operand_subword (y, i, 1, mode);
3613 else if (ypart == 0)
3614 ypart = operand_subword_force (y, i, mode);
3616 gcc_assert (xpart && ypart);
3618 need_clobber |= (GET_CODE (xpart) == SUBREG);
3620 last_insn = emit_move_insn (xpart, ypart);
3623 seq = get_insns ();
3624 end_sequence ();
3626 /* Show the output dies here. This is necessary for SUBREGs
3627 of pseudos since we cannot track their lifetimes correctly;
3628 hard regs shouldn't appear here except as return values.
3629 We never want to emit such a clobber after reload. */
3630 if (x != y
3631 && ! (reload_in_progress || reload_completed)
3632 && need_clobber != 0)
3633 emit_clobber (x);
3635 emit_insn (seq);
3637 return last_insn;
3640 /* Low level part of emit_move_insn.
3641 Called just like emit_move_insn, but assumes X and Y
3642 are basically valid. */
3644 rtx_insn *
3645 emit_move_insn_1 (rtx x, rtx y)
3647 machine_mode mode = GET_MODE (x);
3648 enum insn_code code;
3650 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3652 code = optab_handler (mov_optab, mode);
3653 if (code != CODE_FOR_nothing)
3654 return emit_insn (GEN_FCN (code) (x, y));
3656 /* Expand complex moves by moving real part and imag part. */
3657 if (COMPLEX_MODE_P (mode))
3658 return emit_move_complex (mode, x, y);
3660 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3661 || ALL_FIXED_POINT_MODE_P (mode))
3663 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3665 /* If we can't find an integer mode, use multi words. */
3666 if (result)
3667 return result;
3668 else
3669 return emit_move_multi_word (mode, x, y);
3672 if (GET_MODE_CLASS (mode) == MODE_CC)
3673 return emit_move_ccmode (mode, x, y);
3675 /* Try using a move pattern for the corresponding integer mode. This is
3676 only safe when simplify_subreg can convert MODE constants into integer
3677 constants. At present, it can only do this reliably if the value
3678 fits within a HOST_WIDE_INT. */
3679 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3681 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3683 if (ret)
3685 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3686 return ret;
3690 return emit_move_multi_word (mode, x, y);
3693 /* Generate code to copy Y into X.
3694 Both Y and X must have the same mode, except that
3695 Y can be a constant with VOIDmode.
3696 This mode cannot be BLKmode; use emit_block_move for that.
3698 Return the last instruction emitted. */
3700 rtx_insn *
3701 emit_move_insn (rtx x, rtx y)
3703 machine_mode mode = GET_MODE (x);
3704 rtx y_cst = NULL_RTX;
3705 rtx_insn *last_insn;
3706 rtx set;
3708 gcc_assert (mode != BLKmode
3709 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3711 if (CONSTANT_P (y))
3713 if (optimize
3714 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3715 && (last_insn = compress_float_constant (x, y)))
3716 return last_insn;
3718 y_cst = y;
3720 if (!targetm.legitimate_constant_p (mode, y))
3722 y = force_const_mem (mode, y);
3724 /* If the target's cannot_force_const_mem prevented the spill,
3725 assume that the target's move expanders will also take care
3726 of the non-legitimate constant. */
3727 if (!y)
3728 y = y_cst;
3729 else
3730 y = use_anchored_address (y);
3734 /* If X or Y are memory references, verify that their addresses are valid
3735 for the machine. */
3736 if (MEM_P (x)
3737 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3738 MEM_ADDR_SPACE (x))
3739 && ! push_operand (x, GET_MODE (x))))
3740 x = validize_mem (x);
3742 if (MEM_P (y)
3743 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3744 MEM_ADDR_SPACE (y)))
3745 y = validize_mem (y);
3747 gcc_assert (mode != BLKmode);
3749 last_insn = emit_move_insn_1 (x, y);
3751 if (y_cst && REG_P (x)
3752 && (set = single_set (last_insn)) != NULL_RTX
3753 && SET_DEST (set) == x
3754 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3755 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3757 return last_insn;
3760 /* Generate the body of an instruction to copy Y into X.
3761 It may be a list of insns, if one insn isn't enough. */
3763 rtx_insn *
3764 gen_move_insn (rtx x, rtx y)
3766 rtx_insn *seq;
3768 start_sequence ();
3769 emit_move_insn_1 (x, y);
3770 seq = get_insns ();
3771 end_sequence ();
3772 return seq;
3775 /* If Y is representable exactly in a narrower mode, and the target can
3776 perform the extension directly from constant or memory, then emit the
3777 move as an extension. */
3779 static rtx_insn *
3780 compress_float_constant (rtx x, rtx y)
3782 machine_mode dstmode = GET_MODE (x);
3783 machine_mode orig_srcmode = GET_MODE (y);
3784 machine_mode srcmode;
3785 const REAL_VALUE_TYPE *r;
3786 int oldcost, newcost;
3787 bool speed = optimize_insn_for_speed_p ();
3789 r = CONST_DOUBLE_REAL_VALUE (y);
3791 if (targetm.legitimate_constant_p (dstmode, y))
3792 oldcost = set_src_cost (y, orig_srcmode, speed);
3793 else
3794 oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3796 FOR_EACH_MODE_UNTIL (srcmode, orig_srcmode)
3798 enum insn_code ic;
3799 rtx trunc_y;
3800 rtx_insn *last_insn;
3802 /* Skip if the target can't extend this way. */
3803 ic = can_extend_p (dstmode, srcmode, 0);
3804 if (ic == CODE_FOR_nothing)
3805 continue;
3807 /* Skip if the narrowed value isn't exact. */
3808 if (! exact_real_truncate (srcmode, r))
3809 continue;
3811 trunc_y = const_double_from_real_value (*r, srcmode);
3813 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3815 /* Skip if the target needs extra instructions to perform
3816 the extension. */
3817 if (!insn_operand_matches (ic, 1, trunc_y))
3818 continue;
3819 /* This is valid, but may not be cheaper than the original. */
3820 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3821 dstmode, speed);
3822 if (oldcost < newcost)
3823 continue;
3825 else if (float_extend_from_mem[dstmode][srcmode])
3827 trunc_y = force_const_mem (srcmode, trunc_y);
3828 /* This is valid, but may not be cheaper than the original. */
3829 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3830 dstmode, speed);
3831 if (oldcost < newcost)
3832 continue;
3833 trunc_y = validize_mem (trunc_y);
3835 else
3836 continue;
3838 /* For CSE's benefit, force the compressed constant pool entry
3839 into a new pseudo. This constant may be used in different modes,
3840 and if not, combine will put things back together for us. */
3841 trunc_y = force_reg (srcmode, trunc_y);
3843 /* If x is a hard register, perform the extension into a pseudo,
3844 so that e.g. stack realignment code is aware of it. */
3845 rtx target = x;
3846 if (REG_P (x) && HARD_REGISTER_P (x))
3847 target = gen_reg_rtx (dstmode);
3849 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3850 last_insn = get_last_insn ();
3852 if (REG_P (target))
3853 set_unique_reg_note (last_insn, REG_EQUAL, y);
3855 if (target != x)
3856 return emit_move_insn (x, target);
3857 return last_insn;
3860 return NULL;
3863 /* Pushing data onto the stack. */
3865 /* Push a block of length SIZE (perhaps variable)
3866 and return an rtx to address the beginning of the block.
3867 The value may be virtual_outgoing_args_rtx.
3869 EXTRA is the number of bytes of padding to push in addition to SIZE.
3870 BELOW nonzero means this padding comes at low addresses;
3871 otherwise, the padding comes at high addresses. */
3874 push_block (rtx size, int extra, int below)
3876 rtx temp;
3878 size = convert_modes (Pmode, ptr_mode, size, 1);
3879 if (CONSTANT_P (size))
3880 anti_adjust_stack (plus_constant (Pmode, size, extra));
3881 else if (REG_P (size) && extra == 0)
3882 anti_adjust_stack (size);
3883 else
3885 temp = copy_to_mode_reg (Pmode, size);
3886 if (extra != 0)
3887 temp = expand_binop (Pmode, add_optab, temp,
3888 gen_int_mode (extra, Pmode),
3889 temp, 0, OPTAB_LIB_WIDEN);
3890 anti_adjust_stack (temp);
3893 if (STACK_GROWS_DOWNWARD)
3895 temp = virtual_outgoing_args_rtx;
3896 if (extra != 0 && below)
3897 temp = plus_constant (Pmode, temp, extra);
3899 else
3901 if (CONST_INT_P (size))
3902 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3903 -INTVAL (size) - (below ? 0 : extra));
3904 else if (extra != 0 && !below)
3905 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3906 negate_rtx (Pmode, plus_constant (Pmode, size,
3907 extra)));
3908 else
3909 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3910 negate_rtx (Pmode, size));
3913 return memory_address (NARROWEST_INT_MODE, temp);
3916 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3918 static rtx
3919 mem_autoinc_base (rtx mem)
3921 if (MEM_P (mem))
3923 rtx addr = XEXP (mem, 0);
3924 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3925 return XEXP (addr, 0);
3927 return NULL;
3930 /* A utility routine used here, in reload, and in try_split. The insns
3931 after PREV up to and including LAST are known to adjust the stack,
3932 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3933 placing notes as appropriate. PREV may be NULL, indicating the
3934 entire insn sequence prior to LAST should be scanned.
3936 The set of allowed stack pointer modifications is small:
3937 (1) One or more auto-inc style memory references (aka pushes),
3938 (2) One or more addition/subtraction with the SP as destination,
3939 (3) A single move insn with the SP as destination,
3940 (4) A call_pop insn,
3941 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3943 Insns in the sequence that do not modify the SP are ignored,
3944 except for noreturn calls.
3946 The return value is the amount of adjustment that can be trivially
3947 verified, via immediate operand or auto-inc. If the adjustment
3948 cannot be trivially extracted, the return value is INT_MIN. */
3950 HOST_WIDE_INT
3951 find_args_size_adjust (rtx_insn *insn)
3953 rtx dest, set, pat;
3954 int i;
3956 pat = PATTERN (insn);
3957 set = NULL;
3959 /* Look for a call_pop pattern. */
3960 if (CALL_P (insn))
3962 /* We have to allow non-call_pop patterns for the case
3963 of emit_single_push_insn of a TLS address. */
3964 if (GET_CODE (pat) != PARALLEL)
3965 return 0;
3967 /* All call_pop have a stack pointer adjust in the parallel.
3968 The call itself is always first, and the stack adjust is
3969 usually last, so search from the end. */
3970 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3972 set = XVECEXP (pat, 0, i);
3973 if (GET_CODE (set) != SET)
3974 continue;
3975 dest = SET_DEST (set);
3976 if (dest == stack_pointer_rtx)
3977 break;
3979 /* We'd better have found the stack pointer adjust. */
3980 if (i == 0)
3981 return 0;
3982 /* Fall through to process the extracted SET and DEST
3983 as if it was a standalone insn. */
3985 else if (GET_CODE (pat) == SET)
3986 set = pat;
3987 else if ((set = single_set (insn)) != NULL)
3989 else if (GET_CODE (pat) == PARALLEL)
3991 /* ??? Some older ports use a parallel with a stack adjust
3992 and a store for a PUSH_ROUNDING pattern, rather than a
3993 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3994 /* ??? See h8300 and m68k, pushqi1. */
3995 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3997 set = XVECEXP (pat, 0, i);
3998 if (GET_CODE (set) != SET)
3999 continue;
4000 dest = SET_DEST (set);
4001 if (dest == stack_pointer_rtx)
4002 break;
4004 /* We do not expect an auto-inc of the sp in the parallel. */
4005 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
4006 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4007 != stack_pointer_rtx);
4009 if (i < 0)
4010 return 0;
4012 else
4013 return 0;
4015 dest = SET_DEST (set);
4017 /* Look for direct modifications of the stack pointer. */
4018 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
4020 /* Look for a trivial adjustment, otherwise assume nothing. */
4021 /* Note that the SPU restore_stack_block pattern refers to
4022 the stack pointer in V4SImode. Consider that non-trivial. */
4023 if (SCALAR_INT_MODE_P (GET_MODE (dest))
4024 && GET_CODE (SET_SRC (set)) == PLUS
4025 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
4026 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
4027 return INTVAL (XEXP (SET_SRC (set), 1));
4028 /* ??? Reload can generate no-op moves, which will be cleaned
4029 up later. Recognize it and continue searching. */
4030 else if (rtx_equal_p (dest, SET_SRC (set)))
4031 return 0;
4032 else
4033 return HOST_WIDE_INT_MIN;
4035 else
4037 rtx mem, addr;
4039 /* Otherwise only think about autoinc patterns. */
4040 if (mem_autoinc_base (dest) == stack_pointer_rtx)
4042 mem = dest;
4043 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4044 != stack_pointer_rtx);
4046 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
4047 mem = SET_SRC (set);
4048 else
4049 return 0;
4051 addr = XEXP (mem, 0);
4052 switch (GET_CODE (addr))
4054 case PRE_INC:
4055 case POST_INC:
4056 return GET_MODE_SIZE (GET_MODE (mem));
4057 case PRE_DEC:
4058 case POST_DEC:
4059 return -GET_MODE_SIZE (GET_MODE (mem));
4060 case PRE_MODIFY:
4061 case POST_MODIFY:
4062 addr = XEXP (addr, 1);
4063 gcc_assert (GET_CODE (addr) == PLUS);
4064 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
4065 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
4066 return INTVAL (XEXP (addr, 1));
4067 default:
4068 gcc_unreachable ();
4074 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
4076 int args_size = end_args_size;
4077 bool saw_unknown = false;
4078 rtx_insn *insn;
4080 for (insn = last; insn != prev; insn = PREV_INSN (insn))
4082 HOST_WIDE_INT this_delta;
4084 if (!NONDEBUG_INSN_P (insn))
4085 continue;
4087 this_delta = find_args_size_adjust (insn);
4088 if (this_delta == 0)
4090 if (!CALL_P (insn)
4091 || ACCUMULATE_OUTGOING_ARGS
4092 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
4093 continue;
4096 gcc_assert (!saw_unknown);
4097 if (this_delta == HOST_WIDE_INT_MIN)
4098 saw_unknown = true;
4100 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
4101 if (STACK_GROWS_DOWNWARD)
4102 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
4104 if (saw_unknown)
4105 args_size = INT_MIN;
4106 else
4107 args_size -= this_delta;
4110 return args_size;
4113 #ifdef PUSH_ROUNDING
4114 /* Emit single push insn. */
4116 static void
4117 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4119 rtx dest_addr;
4120 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4121 rtx dest;
4122 enum insn_code icode;
4124 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4125 /* If there is push pattern, use it. Otherwise try old way of throwing
4126 MEM representing push operation to move expander. */
4127 icode = optab_handler (push_optab, mode);
4128 if (icode != CODE_FOR_nothing)
4130 struct expand_operand ops[1];
4132 create_input_operand (&ops[0], x, mode);
4133 if (maybe_expand_insn (icode, 1, ops))
4134 return;
4136 if (GET_MODE_SIZE (mode) == rounded_size)
4137 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4138 /* If we are to pad downward, adjust the stack pointer first and
4139 then store X into the stack location using an offset. This is
4140 because emit_move_insn does not know how to pad; it does not have
4141 access to type. */
4142 else if (targetm.calls.function_arg_padding (mode, type) == PAD_DOWNWARD)
4144 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4145 HOST_WIDE_INT offset;
4147 emit_move_insn (stack_pointer_rtx,
4148 expand_binop (Pmode,
4149 STACK_GROWS_DOWNWARD ? sub_optab
4150 : add_optab,
4151 stack_pointer_rtx,
4152 gen_int_mode (rounded_size, Pmode),
4153 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4155 offset = (HOST_WIDE_INT) padding_size;
4156 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
4157 /* We have already decremented the stack pointer, so get the
4158 previous value. */
4159 offset += (HOST_WIDE_INT) rounded_size;
4161 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4162 /* We have already incremented the stack pointer, so get the
4163 previous value. */
4164 offset -= (HOST_WIDE_INT) rounded_size;
4166 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4167 gen_int_mode (offset, Pmode));
4169 else
4171 if (STACK_GROWS_DOWNWARD)
4172 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4173 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4174 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4175 Pmode));
4176 else
4177 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4178 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4179 gen_int_mode (rounded_size, Pmode));
4181 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4184 dest = gen_rtx_MEM (mode, dest_addr);
4186 if (type != 0)
4188 set_mem_attributes (dest, type, 1);
4190 if (cfun->tail_call_marked)
4191 /* Function incoming arguments may overlap with sibling call
4192 outgoing arguments and we cannot allow reordering of reads
4193 from function arguments with stores to outgoing arguments
4194 of sibling calls. */
4195 set_mem_alias_set (dest, 0);
4197 emit_move_insn (dest, x);
4200 /* Emit and annotate a single push insn. */
4202 static void
4203 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4205 int delta, old_delta = stack_pointer_delta;
4206 rtx_insn *prev = get_last_insn ();
4207 rtx_insn *last;
4209 emit_single_push_insn_1 (mode, x, type);
4211 last = get_last_insn ();
4213 /* Notice the common case where we emitted exactly one insn. */
4214 if (PREV_INSN (last) == prev)
4216 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4217 return;
4220 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4221 gcc_assert (delta == INT_MIN || delta == old_delta);
4223 #endif
4225 /* If reading SIZE bytes from X will end up reading from
4226 Y return the number of bytes that overlap. Return -1
4227 if there is no overlap or -2 if we can't determine
4228 (for example when X and Y have different base registers). */
4230 static int
4231 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4233 rtx tmp = plus_constant (Pmode, x, size);
4234 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4236 if (!CONST_INT_P (sub))
4237 return -2;
4239 HOST_WIDE_INT val = INTVAL (sub);
4241 return IN_RANGE (val, 1, size) ? val : -1;
4244 /* Generate code to push X onto the stack, assuming it has mode MODE and
4245 type TYPE.
4246 MODE is redundant except when X is a CONST_INT (since they don't
4247 carry mode info).
4248 SIZE is an rtx for the size of data to be copied (in bytes),
4249 needed only if X is BLKmode.
4250 Return true if successful. May return false if asked to push a
4251 partial argument during a sibcall optimization (as specified by
4252 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4253 to not overlap.
4255 ALIGN (in bits) is maximum alignment we can assume.
4257 If PARTIAL and REG are both nonzero, then copy that many of the first
4258 bytes of X into registers starting with REG, and push the rest of X.
4259 The amount of space pushed is decreased by PARTIAL bytes.
4260 REG must be a hard register in this case.
4261 If REG is zero but PARTIAL is not, take any all others actions for an
4262 argument partially in registers, but do not actually load any
4263 registers.
4265 EXTRA is the amount in bytes of extra space to leave next to this arg.
4266 This is ignored if an argument block has already been allocated.
4268 On a machine that lacks real push insns, ARGS_ADDR is the address of
4269 the bottom of the argument block for this call. We use indexing off there
4270 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
4271 argument block has not been preallocated.
4273 ARGS_SO_FAR is the size of args previously pushed for this call.
4275 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4276 for arguments passed in registers. If nonzero, it will be the number
4277 of bytes required. */
4279 bool
4280 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4281 unsigned int align, int partial, rtx reg, int extra,
4282 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4283 rtx alignment_pad, bool sibcall_p)
4285 rtx xinner;
4286 pad_direction stack_direction
4287 = STACK_GROWS_DOWNWARD ? PAD_DOWNWARD : PAD_UPWARD;
4289 /* Decide where to pad the argument: PAD_DOWNWARD for below,
4290 PAD_UPWARD for above, or PAD_NONE for don't pad it.
4291 Default is below for small data on big-endian machines; else above. */
4292 pad_direction where_pad = targetm.calls.function_arg_padding (mode, type);
4294 /* Invert direction if stack is post-decrement.
4295 FIXME: why? */
4296 if (STACK_PUSH_CODE == POST_DEC)
4297 if (where_pad != PAD_NONE)
4298 where_pad = (where_pad == PAD_DOWNWARD ? PAD_UPWARD : PAD_DOWNWARD);
4300 xinner = x;
4302 int nregs = partial / UNITS_PER_WORD;
4303 rtx *tmp_regs = NULL;
4304 int overlapping = 0;
4306 if (mode == BLKmode
4307 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4309 /* Copy a block into the stack, entirely or partially. */
4311 rtx temp;
4312 int used;
4313 int offset;
4314 int skip;
4316 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4317 used = partial - offset;
4319 if (mode != BLKmode)
4321 /* A value is to be stored in an insufficiently aligned
4322 stack slot; copy via a suitably aligned slot if
4323 necessary. */
4324 size = GEN_INT (GET_MODE_SIZE (mode));
4325 if (!MEM_P (xinner))
4327 temp = assign_temp (type, 1, 1);
4328 emit_move_insn (temp, xinner);
4329 xinner = temp;
4333 gcc_assert (size);
4335 /* USED is now the # of bytes we need not copy to the stack
4336 because registers will take care of them. */
4338 if (partial != 0)
4339 xinner = adjust_address (xinner, BLKmode, used);
4341 /* If the partial register-part of the arg counts in its stack size,
4342 skip the part of stack space corresponding to the registers.
4343 Otherwise, start copying to the beginning of the stack space,
4344 by setting SKIP to 0. */
4345 skip = (reg_parm_stack_space == 0) ? 0 : used;
4347 #ifdef PUSH_ROUNDING
4348 /* Do it with several push insns if that doesn't take lots of insns
4349 and if there is no difficulty with push insns that skip bytes
4350 on the stack for alignment purposes. */
4351 if (args_addr == 0
4352 && PUSH_ARGS
4353 && CONST_INT_P (size)
4354 && skip == 0
4355 && MEM_ALIGN (xinner) >= align
4356 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4357 /* Here we avoid the case of a structure whose weak alignment
4358 forces many pushes of a small amount of data,
4359 and such small pushes do rounding that causes trouble. */
4360 && ((!targetm.slow_unaligned_access (word_mode, align))
4361 || align >= BIGGEST_ALIGNMENT
4362 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4363 == (align / BITS_PER_UNIT)))
4364 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4366 /* Push padding now if padding above and stack grows down,
4367 or if padding below and stack grows up.
4368 But if space already allocated, this has already been done. */
4369 if (extra && args_addr == 0
4370 && where_pad != PAD_NONE && where_pad != stack_direction)
4371 anti_adjust_stack (GEN_INT (extra));
4373 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4375 else
4376 #endif /* PUSH_ROUNDING */
4378 rtx target;
4380 /* Otherwise make space on the stack and copy the data
4381 to the address of that space. */
4383 /* Deduct words put into registers from the size we must copy. */
4384 if (partial != 0)
4386 if (CONST_INT_P (size))
4387 size = GEN_INT (INTVAL (size) - used);
4388 else
4389 size = expand_binop (GET_MODE (size), sub_optab, size,
4390 gen_int_mode (used, GET_MODE (size)),
4391 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4394 /* Get the address of the stack space.
4395 In this case, we do not deal with EXTRA separately.
4396 A single stack adjust will do. */
4397 if (! args_addr)
4399 temp = push_block (size, extra, where_pad == PAD_DOWNWARD);
4400 extra = 0;
4402 else if (CONST_INT_P (args_so_far))
4403 temp = memory_address (BLKmode,
4404 plus_constant (Pmode, args_addr,
4405 skip + INTVAL (args_so_far)));
4406 else
4407 temp = memory_address (BLKmode,
4408 plus_constant (Pmode,
4409 gen_rtx_PLUS (Pmode,
4410 args_addr,
4411 args_so_far),
4412 skip));
4414 if (!ACCUMULATE_OUTGOING_ARGS)
4416 /* If the source is referenced relative to the stack pointer,
4417 copy it to another register to stabilize it. We do not need
4418 to do this if we know that we won't be changing sp. */
4420 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4421 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4422 temp = copy_to_reg (temp);
4425 target = gen_rtx_MEM (BLKmode, temp);
4427 /* We do *not* set_mem_attributes here, because incoming arguments
4428 may overlap with sibling call outgoing arguments and we cannot
4429 allow reordering of reads from function arguments with stores
4430 to outgoing arguments of sibling calls. We do, however, want
4431 to record the alignment of the stack slot. */
4432 /* ALIGN may well be better aligned than TYPE, e.g. due to
4433 PARM_BOUNDARY. Assume the caller isn't lying. */
4434 set_mem_align (target, align);
4436 /* If part should go in registers and pushing to that part would
4437 overwrite some of the values that need to go into regs, load the
4438 overlapping values into temporary pseudos to be moved into the hard
4439 regs at the end after the stack pushing has completed.
4440 We cannot load them directly into the hard regs here because
4441 they can be clobbered by the block move expansions.
4442 See PR 65358. */
4444 if (partial > 0 && reg != 0 && mode == BLKmode
4445 && GET_CODE (reg) != PARALLEL)
4447 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4448 if (overlapping > 0)
4450 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4451 overlapping /= UNITS_PER_WORD;
4453 tmp_regs = XALLOCAVEC (rtx, overlapping);
4455 for (int i = 0; i < overlapping; i++)
4456 tmp_regs[i] = gen_reg_rtx (word_mode);
4458 for (int i = 0; i < overlapping; i++)
4459 emit_move_insn (tmp_regs[i],
4460 operand_subword_force (target, i, mode));
4462 else if (overlapping == -1)
4463 overlapping = 0;
4464 /* Could not determine whether there is overlap.
4465 Fail the sibcall. */
4466 else
4468 overlapping = 0;
4469 if (sibcall_p)
4470 return false;
4473 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4476 else if (partial > 0)
4478 /* Scalar partly in registers. */
4480 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4481 int i;
4482 int not_stack;
4483 /* # bytes of start of argument
4484 that we must make space for but need not store. */
4485 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4486 int args_offset = INTVAL (args_so_far);
4487 int skip;
4489 /* Push padding now if padding above and stack grows down,
4490 or if padding below and stack grows up.
4491 But if space already allocated, this has already been done. */
4492 if (extra && args_addr == 0
4493 && where_pad != PAD_NONE && where_pad != stack_direction)
4494 anti_adjust_stack (GEN_INT (extra));
4496 /* If we make space by pushing it, we might as well push
4497 the real data. Otherwise, we can leave OFFSET nonzero
4498 and leave the space uninitialized. */
4499 if (args_addr == 0)
4500 offset = 0;
4502 /* Now NOT_STACK gets the number of words that we don't need to
4503 allocate on the stack. Convert OFFSET to words too. */
4504 not_stack = (partial - offset) / UNITS_PER_WORD;
4505 offset /= UNITS_PER_WORD;
4507 /* If the partial register-part of the arg counts in its stack size,
4508 skip the part of stack space corresponding to the registers.
4509 Otherwise, start copying to the beginning of the stack space,
4510 by setting SKIP to 0. */
4511 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4513 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4514 x = validize_mem (force_const_mem (mode, x));
4516 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4517 SUBREGs of such registers are not allowed. */
4518 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4519 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4520 x = copy_to_reg (x);
4522 /* Loop over all the words allocated on the stack for this arg. */
4523 /* We can do it by words, because any scalar bigger than a word
4524 has a size a multiple of a word. */
4525 for (i = size - 1; i >= not_stack; i--)
4526 if (i >= not_stack + offset)
4527 if (!emit_push_insn (operand_subword_force (x, i, mode),
4528 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4529 0, args_addr,
4530 GEN_INT (args_offset + ((i - not_stack + skip)
4531 * UNITS_PER_WORD)),
4532 reg_parm_stack_space, alignment_pad, sibcall_p))
4533 return false;
4535 else
4537 rtx addr;
4538 rtx dest;
4540 /* Push padding now if padding above and stack grows down,
4541 or if padding below and stack grows up.
4542 But if space already allocated, this has already been done. */
4543 if (extra && args_addr == 0
4544 && where_pad != PAD_NONE && where_pad != stack_direction)
4545 anti_adjust_stack (GEN_INT (extra));
4547 #ifdef PUSH_ROUNDING
4548 if (args_addr == 0 && PUSH_ARGS)
4549 emit_single_push_insn (mode, x, type);
4550 else
4551 #endif
4553 addr = simplify_gen_binary (PLUS, Pmode, args_addr, args_so_far);
4554 dest = gen_rtx_MEM (mode, memory_address (mode, addr));
4556 /* We do *not* set_mem_attributes here, because incoming arguments
4557 may overlap with sibling call outgoing arguments and we cannot
4558 allow reordering of reads from function arguments with stores
4559 to outgoing arguments of sibling calls. We do, however, want
4560 to record the alignment of the stack slot. */
4561 /* ALIGN may well be better aligned than TYPE, e.g. due to
4562 PARM_BOUNDARY. Assume the caller isn't lying. */
4563 set_mem_align (dest, align);
4565 emit_move_insn (dest, x);
4569 /* Move the partial arguments into the registers and any overlapping
4570 values that we moved into the pseudos in tmp_regs. */
4571 if (partial > 0 && reg != 0)
4573 /* Handle calls that pass values in multiple non-contiguous locations.
4574 The Irix 6 ABI has examples of this. */
4575 if (GET_CODE (reg) == PARALLEL)
4576 emit_group_load (reg, x, type, -1);
4577 else
4579 gcc_assert (partial % UNITS_PER_WORD == 0);
4580 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4582 for (int i = 0; i < overlapping; i++)
4583 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4584 + nregs - overlapping + i),
4585 tmp_regs[i]);
4590 if (extra && args_addr == 0 && where_pad == stack_direction)
4591 anti_adjust_stack (GEN_INT (extra));
4593 if (alignment_pad && args_addr == 0)
4594 anti_adjust_stack (alignment_pad);
4596 return true;
4599 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4600 operations. */
4602 static rtx
4603 get_subtarget (rtx x)
4605 return (optimize
4606 || x == 0
4607 /* Only registers can be subtargets. */
4608 || !REG_P (x)
4609 /* Don't use hard regs to avoid extending their life. */
4610 || REGNO (x) < FIRST_PSEUDO_REGISTER
4611 ? 0 : x);
4614 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4615 FIELD is a bitfield. Returns true if the optimization was successful,
4616 and there's nothing else to do. */
4618 static bool
4619 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4620 unsigned HOST_WIDE_INT bitpos,
4621 unsigned HOST_WIDE_INT bitregion_start,
4622 unsigned HOST_WIDE_INT bitregion_end,
4623 machine_mode mode1, rtx str_rtx,
4624 tree to, tree src, bool reverse)
4626 machine_mode str_mode = GET_MODE (str_rtx);
4627 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4628 tree op0, op1;
4629 rtx value, result;
4630 optab binop;
4631 gimple *srcstmt;
4632 enum tree_code code;
4634 if (mode1 != VOIDmode
4635 || bitsize >= BITS_PER_WORD
4636 || str_bitsize > BITS_PER_WORD
4637 || TREE_SIDE_EFFECTS (to)
4638 || TREE_THIS_VOLATILE (to))
4639 return false;
4641 STRIP_NOPS (src);
4642 if (TREE_CODE (src) != SSA_NAME)
4643 return false;
4644 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4645 return false;
4647 srcstmt = get_gimple_for_ssa_name (src);
4648 if (!srcstmt
4649 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4650 return false;
4652 code = gimple_assign_rhs_code (srcstmt);
4654 op0 = gimple_assign_rhs1 (srcstmt);
4656 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4657 to find its initialization. Hopefully the initialization will
4658 be from a bitfield load. */
4659 if (TREE_CODE (op0) == SSA_NAME)
4661 gimple *op0stmt = get_gimple_for_ssa_name (op0);
4663 /* We want to eventually have OP0 be the same as TO, which
4664 should be a bitfield. */
4665 if (!op0stmt
4666 || !is_gimple_assign (op0stmt)
4667 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4668 return false;
4669 op0 = gimple_assign_rhs1 (op0stmt);
4672 op1 = gimple_assign_rhs2 (srcstmt);
4674 if (!operand_equal_p (to, op0, 0))
4675 return false;
4677 if (MEM_P (str_rtx))
4679 unsigned HOST_WIDE_INT offset1;
4681 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4682 str_bitsize = BITS_PER_WORD;
4684 scalar_int_mode best_mode;
4685 if (!get_best_mode (bitsize, bitpos, bitregion_start, bitregion_end,
4686 MEM_ALIGN (str_rtx), str_bitsize, false, &best_mode))
4687 return false;
4688 str_mode = best_mode;
4689 str_bitsize = GET_MODE_BITSIZE (best_mode);
4691 offset1 = bitpos;
4692 bitpos %= str_bitsize;
4693 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4694 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4696 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4697 return false;
4698 else
4699 gcc_assert (!reverse);
4701 /* If the bit field covers the whole REG/MEM, store_field
4702 will likely generate better code. */
4703 if (bitsize >= str_bitsize)
4704 return false;
4706 /* We can't handle fields split across multiple entities. */
4707 if (bitpos + bitsize > str_bitsize)
4708 return false;
4710 if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4711 bitpos = str_bitsize - bitpos - bitsize;
4713 switch (code)
4715 case PLUS_EXPR:
4716 case MINUS_EXPR:
4717 /* For now, just optimize the case of the topmost bitfield
4718 where we don't need to do any masking and also
4719 1 bit bitfields where xor can be used.
4720 We might win by one instruction for the other bitfields
4721 too if insv/extv instructions aren't used, so that
4722 can be added later. */
4723 if ((reverse || bitpos + bitsize != str_bitsize)
4724 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4725 break;
4727 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4728 value = convert_modes (str_mode,
4729 TYPE_MODE (TREE_TYPE (op1)), value,
4730 TYPE_UNSIGNED (TREE_TYPE (op1)));
4732 /* We may be accessing data outside the field, which means
4733 we can alias adjacent data. */
4734 if (MEM_P (str_rtx))
4736 str_rtx = shallow_copy_rtx (str_rtx);
4737 set_mem_alias_set (str_rtx, 0);
4738 set_mem_expr (str_rtx, 0);
4741 if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4743 value = expand_and (str_mode, value, const1_rtx, NULL);
4744 binop = xor_optab;
4746 else
4747 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4749 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4750 if (reverse)
4751 value = flip_storage_order (str_mode, value);
4752 result = expand_binop (str_mode, binop, str_rtx,
4753 value, str_rtx, 1, OPTAB_WIDEN);
4754 if (result != str_rtx)
4755 emit_move_insn (str_rtx, result);
4756 return true;
4758 case BIT_IOR_EXPR:
4759 case BIT_XOR_EXPR:
4760 if (TREE_CODE (op1) != INTEGER_CST)
4761 break;
4762 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4763 value = convert_modes (str_mode,
4764 TYPE_MODE (TREE_TYPE (op1)), value,
4765 TYPE_UNSIGNED (TREE_TYPE (op1)));
4767 /* We may be accessing data outside the field, which means
4768 we can alias adjacent data. */
4769 if (MEM_P (str_rtx))
4771 str_rtx = shallow_copy_rtx (str_rtx);
4772 set_mem_alias_set (str_rtx, 0);
4773 set_mem_expr (str_rtx, 0);
4776 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4777 if (bitpos + bitsize != str_bitsize)
4779 rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
4780 str_mode);
4781 value = expand_and (str_mode, value, mask, NULL_RTX);
4783 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4784 if (reverse)
4785 value = flip_storage_order (str_mode, value);
4786 result = expand_binop (str_mode, binop, str_rtx,
4787 value, str_rtx, 1, OPTAB_WIDEN);
4788 if (result != str_rtx)
4789 emit_move_insn (str_rtx, result);
4790 return true;
4792 default:
4793 break;
4796 return false;
4799 /* In the C++ memory model, consecutive bit fields in a structure are
4800 considered one memory location.
4802 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4803 returns the bit range of consecutive bits in which this COMPONENT_REF
4804 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4805 and *OFFSET may be adjusted in the process.
4807 If the access does not need to be restricted, 0 is returned in both
4808 *BITSTART and *BITEND. */
4810 void
4811 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4812 unsigned HOST_WIDE_INT *bitend,
4813 tree exp,
4814 HOST_WIDE_INT *bitpos,
4815 tree *offset)
4817 HOST_WIDE_INT bitoffset;
4818 tree field, repr;
4820 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4822 field = TREE_OPERAND (exp, 1);
4823 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4824 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4825 need to limit the range we can access. */
4826 if (!repr)
4828 *bitstart = *bitend = 0;
4829 return;
4832 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4833 part of a larger bit field, then the representative does not serve any
4834 useful purpose. This can occur in Ada. */
4835 if (handled_component_p (TREE_OPERAND (exp, 0)))
4837 machine_mode rmode;
4838 HOST_WIDE_INT rbitsize, rbitpos;
4839 tree roffset;
4840 int unsignedp, reversep, volatilep = 0;
4841 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4842 &roffset, &rmode, &unsignedp, &reversep,
4843 &volatilep);
4844 if ((rbitpos % BITS_PER_UNIT) != 0)
4846 *bitstart = *bitend = 0;
4847 return;
4851 /* Compute the adjustment to bitpos from the offset of the field
4852 relative to the representative. DECL_FIELD_OFFSET of field and
4853 repr are the same by construction if they are not constants,
4854 see finish_bitfield_layout. */
4855 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4856 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4857 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4858 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4859 else
4860 bitoffset = 0;
4861 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4862 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4864 /* If the adjustment is larger than bitpos, we would have a negative bit
4865 position for the lower bound and this may wreak havoc later. Adjust
4866 offset and bitpos to make the lower bound non-negative in that case. */
4867 if (bitoffset > *bitpos)
4869 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4870 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4872 *bitpos += adjust;
4873 if (*offset == NULL_TREE)
4874 *offset = size_int (-adjust / BITS_PER_UNIT);
4875 else
4876 *offset
4877 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4878 *bitstart = 0;
4880 else
4881 *bitstart = *bitpos - bitoffset;
4883 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4886 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4887 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4888 DECL_RTL was not set yet, return NORTL. */
4890 static inline bool
4891 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4893 if (TREE_CODE (addr) != ADDR_EXPR)
4894 return false;
4896 tree base = TREE_OPERAND (addr, 0);
4898 if (!DECL_P (base)
4899 || TREE_ADDRESSABLE (base)
4900 || DECL_MODE (base) == BLKmode)
4901 return false;
4903 if (!DECL_RTL_SET_P (base))
4904 return nortl;
4906 return (!MEM_P (DECL_RTL (base)));
4909 /* Returns true if the MEM_REF REF refers to an object that does not
4910 reside in memory and has non-BLKmode. */
4912 static inline bool
4913 mem_ref_refers_to_non_mem_p (tree ref)
4915 tree base = TREE_OPERAND (ref, 0);
4916 return addr_expr_of_non_mem_decl_p_1 (base, false);
4919 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4920 is true, try generating a nontemporal store. */
4922 void
4923 expand_assignment (tree to, tree from, bool nontemporal)
4925 rtx to_rtx = 0;
4926 rtx result;
4927 machine_mode mode;
4928 unsigned int align;
4929 enum insn_code icode;
4931 /* Don't crash if the lhs of the assignment was erroneous. */
4932 if (TREE_CODE (to) == ERROR_MARK)
4934 expand_normal (from);
4935 return;
4938 /* Optimize away no-op moves without side-effects. */
4939 if (operand_equal_p (to, from, 0))
4940 return;
4942 /* Handle misaligned stores. */
4943 mode = TYPE_MODE (TREE_TYPE (to));
4944 if ((TREE_CODE (to) == MEM_REF
4945 || TREE_CODE (to) == TARGET_MEM_REF)
4946 && mode != BLKmode
4947 && !mem_ref_refers_to_non_mem_p (to)
4948 && ((align = get_object_alignment (to))
4949 < GET_MODE_ALIGNMENT (mode))
4950 && (((icode = optab_handler (movmisalign_optab, mode))
4951 != CODE_FOR_nothing)
4952 || targetm.slow_unaligned_access (mode, align)))
4954 rtx reg, mem;
4956 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4957 reg = force_not_mem (reg);
4958 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4959 if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
4960 reg = flip_storage_order (mode, reg);
4962 if (icode != CODE_FOR_nothing)
4964 struct expand_operand ops[2];
4966 create_fixed_operand (&ops[0], mem);
4967 create_input_operand (&ops[1], reg, mode);
4968 /* The movmisalign<mode> pattern cannot fail, else the assignment
4969 would silently be omitted. */
4970 expand_insn (icode, 2, ops);
4972 else
4973 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
4974 false);
4975 return;
4978 /* Assignment of a structure component needs special treatment
4979 if the structure component's rtx is not simply a MEM.
4980 Assignment of an array element at a constant index, and assignment of
4981 an array element in an unaligned packed structure field, has the same
4982 problem. Same for (partially) storing into a non-memory object. */
4983 if (handled_component_p (to)
4984 || (TREE_CODE (to) == MEM_REF
4985 && (REF_REVERSE_STORAGE_ORDER (to)
4986 || mem_ref_refers_to_non_mem_p (to)))
4987 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4989 machine_mode mode1;
4990 HOST_WIDE_INT bitsize, bitpos;
4991 unsigned HOST_WIDE_INT bitregion_start = 0;
4992 unsigned HOST_WIDE_INT bitregion_end = 0;
4993 tree offset;
4994 int unsignedp, reversep, volatilep = 0;
4995 tree tem;
4997 push_temp_slots ();
4998 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4999 &unsignedp, &reversep, &volatilep);
5001 /* Make sure bitpos is not negative, it can wreak havoc later. */
5002 if (bitpos < 0)
5004 gcc_assert (offset == NULL_TREE);
5005 offset = size_int (bitpos >> LOG2_BITS_PER_UNIT);
5006 bitpos &= BITS_PER_UNIT - 1;
5009 if (TREE_CODE (to) == COMPONENT_REF
5010 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
5011 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
5012 /* The C++ memory model naturally applies to byte-aligned fields.
5013 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
5014 BITSIZE are not byte-aligned, there is no need to limit the range
5015 we can access. This can occur with packed structures in Ada. */
5016 else if (bitsize > 0
5017 && bitsize % BITS_PER_UNIT == 0
5018 && bitpos % BITS_PER_UNIT == 0)
5020 bitregion_start = bitpos;
5021 bitregion_end = bitpos + bitsize - 1;
5024 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
5026 /* If the field has a mode, we want to access it in the
5027 field's mode, not the computed mode.
5028 If a MEM has VOIDmode (external with incomplete type),
5029 use BLKmode for it instead. */
5030 if (MEM_P (to_rtx))
5032 if (mode1 != VOIDmode)
5033 to_rtx = adjust_address (to_rtx, mode1, 0);
5034 else if (GET_MODE (to_rtx) == VOIDmode)
5035 to_rtx = adjust_address (to_rtx, BLKmode, 0);
5038 if (offset != 0)
5040 machine_mode address_mode;
5041 rtx offset_rtx;
5043 if (!MEM_P (to_rtx))
5045 /* We can get constant negative offsets into arrays with broken
5046 user code. Translate this to a trap instead of ICEing. */
5047 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
5048 expand_builtin_trap ();
5049 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
5052 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
5053 address_mode = get_address_mode (to_rtx);
5054 if (GET_MODE (offset_rtx) != address_mode)
5056 /* We cannot be sure that the RTL in offset_rtx is valid outside
5057 of a memory address context, so force it into a register
5058 before attempting to convert it to the desired mode. */
5059 offset_rtx = force_operand (offset_rtx, NULL_RTX);
5060 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5063 /* If we have an expression in OFFSET_RTX and a non-zero
5064 byte offset in BITPOS, adding the byte offset before the
5065 OFFSET_RTX results in better intermediate code, which makes
5066 later rtl optimization passes perform better.
5068 We prefer intermediate code like this:
5070 r124:DI=r123:DI+0x18
5071 [r124:DI]=r121:DI
5073 ... instead of ...
5075 r124:DI=r123:DI+0x10
5076 [r124:DI+0x8]=r121:DI
5078 This is only done for aligned data values, as these can
5079 be expected to result in single move instructions. */
5080 if (mode1 != VOIDmode
5081 && bitpos != 0
5082 && bitsize > 0
5083 && (bitpos % bitsize) == 0
5084 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
5085 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
5087 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
5088 bitregion_start = 0;
5089 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
5090 bitregion_end -= bitpos;
5091 bitpos = 0;
5094 to_rtx = offset_address (to_rtx, offset_rtx,
5095 highest_pow2_factor_for_target (to,
5096 offset));
5099 /* No action is needed if the target is not a memory and the field
5100 lies completely outside that target. This can occur if the source
5101 code contains an out-of-bounds access to a small array. */
5102 if (!MEM_P (to_rtx)
5103 && GET_MODE (to_rtx) != BLKmode
5104 && (unsigned HOST_WIDE_INT) bitpos
5105 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
5107 expand_normal (from);
5108 result = NULL;
5110 /* Handle expand_expr of a complex value returning a CONCAT. */
5111 else if (GET_CODE (to_rtx) == CONCAT)
5113 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
5114 if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE (to_rtx)
5115 && COMPLEX_MODE_P (GET_MODE (to_rtx))
5116 && bitpos == 0
5117 && bitsize == mode_bitsize)
5118 result = store_expr (from, to_rtx, false, nontemporal, reversep);
5119 else if (bitsize == mode_bitsize / 2
5120 && (bitpos == 0 || bitpos == mode_bitsize / 2))
5121 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
5122 nontemporal, reversep);
5123 else if (bitpos + bitsize <= mode_bitsize / 2)
5124 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
5125 bitregion_start, bitregion_end,
5126 mode1, from, get_alias_set (to),
5127 nontemporal, reversep);
5128 else if (bitpos >= mode_bitsize / 2)
5129 result = store_field (XEXP (to_rtx, 1), bitsize,
5130 bitpos - mode_bitsize / 2,
5131 bitregion_start, bitregion_end,
5132 mode1, from, get_alias_set (to),
5133 nontemporal, reversep);
5134 else if (bitpos == 0 && bitsize == mode_bitsize)
5136 result = expand_normal (from);
5137 if (GET_CODE (result) == CONCAT)
5139 machine_mode to_mode = GET_MODE_INNER (GET_MODE (to_rtx));
5140 machine_mode from_mode = GET_MODE_INNER (GET_MODE (result));
5141 rtx from_real
5142 = simplify_gen_subreg (to_mode, XEXP (result, 0),
5143 from_mode, 0);
5144 rtx from_imag
5145 = simplify_gen_subreg (to_mode, XEXP (result, 1),
5146 from_mode, 1);
5147 emit_move_insn (XEXP (to_rtx, 0), from_real);
5148 emit_move_insn (XEXP (to_rtx, 1), from_imag);
5150 else
5152 rtx from_rtx
5153 = simplify_gen_subreg (GET_MODE (to_rtx), result,
5154 TYPE_MODE (TREE_TYPE (from)), 0);
5155 emit_move_insn (XEXP (to_rtx, 0),
5156 read_complex_part (from_rtx, false));
5157 emit_move_insn (XEXP (to_rtx, 1),
5158 read_complex_part (from_rtx, true));
5161 else
5163 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5164 GET_MODE_SIZE (GET_MODE (to_rtx)));
5165 write_complex_part (temp, XEXP (to_rtx, 0), false);
5166 write_complex_part (temp, XEXP (to_rtx, 1), true);
5167 result = store_field (temp, bitsize, bitpos,
5168 bitregion_start, bitregion_end,
5169 mode1, from, get_alias_set (to),
5170 nontemporal, reversep);
5171 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
5172 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5175 else
5177 if (MEM_P (to_rtx))
5179 /* If the field is at offset zero, we could have been given the
5180 DECL_RTX of the parent struct. Don't munge it. */
5181 to_rtx = shallow_copy_rtx (to_rtx);
5182 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5183 if (volatilep)
5184 MEM_VOLATILE_P (to_rtx) = 1;
5187 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5188 bitregion_start, bitregion_end,
5189 mode1, to_rtx, to, from,
5190 reversep))
5191 result = NULL;
5192 else
5193 result = store_field (to_rtx, bitsize, bitpos,
5194 bitregion_start, bitregion_end,
5195 mode1, from, get_alias_set (to),
5196 nontemporal, reversep);
5199 if (result)
5200 preserve_temp_slots (result);
5201 pop_temp_slots ();
5202 return;
5205 /* If the rhs is a function call and its value is not an aggregate,
5206 call the function before we start to compute the lhs.
5207 This is needed for correct code for cases such as
5208 val = setjmp (buf) on machines where reference to val
5209 requires loading up part of an address in a separate insn.
5211 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5212 since it might be a promoted variable where the zero- or sign- extension
5213 needs to be done. Handling this in the normal way is safe because no
5214 computation is done before the call. The same is true for SSA names. */
5215 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5216 && COMPLETE_TYPE_P (TREE_TYPE (from))
5217 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5218 && ! (((VAR_P (to)
5219 || TREE_CODE (to) == PARM_DECL
5220 || TREE_CODE (to) == RESULT_DECL)
5221 && REG_P (DECL_RTL (to)))
5222 || TREE_CODE (to) == SSA_NAME))
5224 rtx value;
5225 rtx bounds;
5227 push_temp_slots ();
5228 value = expand_normal (from);
5230 /* Split value and bounds to store them separately. */
5231 chkp_split_slot (value, &value, &bounds);
5233 if (to_rtx == 0)
5234 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5236 /* Handle calls that return values in multiple non-contiguous locations.
5237 The Irix 6 ABI has examples of this. */
5238 if (GET_CODE (to_rtx) == PARALLEL)
5240 if (GET_CODE (value) == PARALLEL)
5241 emit_group_move (to_rtx, value);
5242 else
5243 emit_group_load (to_rtx, value, TREE_TYPE (from),
5244 int_size_in_bytes (TREE_TYPE (from)));
5246 else if (GET_CODE (value) == PARALLEL)
5247 emit_group_store (to_rtx, value, TREE_TYPE (from),
5248 int_size_in_bytes (TREE_TYPE (from)));
5249 else if (GET_MODE (to_rtx) == BLKmode)
5251 /* Handle calls that return BLKmode values in registers. */
5252 if (REG_P (value))
5253 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5254 else
5255 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5257 else
5259 if (POINTER_TYPE_P (TREE_TYPE (to)))
5260 value = convert_memory_address_addr_space
5261 (as_a <scalar_int_mode> (GET_MODE (to_rtx)), value,
5262 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5264 emit_move_insn (to_rtx, value);
5267 /* Store bounds if required. */
5268 if (bounds
5269 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5271 gcc_assert (MEM_P (to_rtx));
5272 chkp_emit_bounds_store (bounds, value, to_rtx);
5275 preserve_temp_slots (to_rtx);
5276 pop_temp_slots ();
5277 return;
5280 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5281 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5283 /* Don't move directly into a return register. */
5284 if (TREE_CODE (to) == RESULT_DECL
5285 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5287 rtx temp;
5289 push_temp_slots ();
5291 /* If the source is itself a return value, it still is in a pseudo at
5292 this point so we can move it back to the return register directly. */
5293 if (REG_P (to_rtx)
5294 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5295 && TREE_CODE (from) != CALL_EXPR)
5296 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5297 else
5298 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5300 /* Handle calls that return values in multiple non-contiguous locations.
5301 The Irix 6 ABI has examples of this. */
5302 if (GET_CODE (to_rtx) == PARALLEL)
5304 if (GET_CODE (temp) == PARALLEL)
5305 emit_group_move (to_rtx, temp);
5306 else
5307 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5308 int_size_in_bytes (TREE_TYPE (from)));
5310 else if (temp)
5311 emit_move_insn (to_rtx, temp);
5313 preserve_temp_slots (to_rtx);
5314 pop_temp_slots ();
5315 return;
5318 /* In case we are returning the contents of an object which overlaps
5319 the place the value is being stored, use a safe function when copying
5320 a value through a pointer into a structure value return block. */
5321 if (TREE_CODE (to) == RESULT_DECL
5322 && TREE_CODE (from) == INDIRECT_REF
5323 && ADDR_SPACE_GENERIC_P
5324 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5325 && refs_may_alias_p (to, from)
5326 && cfun->returns_struct
5327 && !cfun->returns_pcc_struct)
5329 rtx from_rtx, size;
5331 push_temp_slots ();
5332 size = expr_size (from);
5333 from_rtx = expand_normal (from);
5335 emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);
5337 preserve_temp_slots (to_rtx);
5338 pop_temp_slots ();
5339 return;
5342 /* Compute FROM and store the value in the rtx we got. */
5344 push_temp_slots ();
5345 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
5346 preserve_temp_slots (result);
5347 pop_temp_slots ();
5348 return;
5351 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5352 succeeded, false otherwise. */
5354 bool
5355 emit_storent_insn (rtx to, rtx from)
5357 struct expand_operand ops[2];
5358 machine_mode mode = GET_MODE (to);
5359 enum insn_code code = optab_handler (storent_optab, mode);
5361 if (code == CODE_FOR_nothing)
5362 return false;
5364 create_fixed_operand (&ops[0], to);
5365 create_input_operand (&ops[1], from, mode);
5366 return maybe_expand_insn (code, 2, ops);
5369 /* Generate code for computing expression EXP,
5370 and storing the value into TARGET.
5372 If the mode is BLKmode then we may return TARGET itself.
5373 It turns out that in BLKmode it doesn't cause a problem.
5374 because C has no operators that could combine two different
5375 assignments into the same BLKmode object with different values
5376 with no sequence point. Will other languages need this to
5377 be more thorough?
5379 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5380 stack, and block moves may need to be treated specially.
5382 If NONTEMPORAL is true, try using a nontemporal store instruction.
5384 If REVERSE is true, the store is to be done in reverse order.
5386 If BTARGET is not NULL then computed bounds of EXP are
5387 associated with BTARGET. */
5390 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5391 bool nontemporal, bool reverse, tree btarget)
5393 rtx temp;
5394 rtx alt_rtl = NULL_RTX;
5395 location_t loc = curr_insn_location ();
5397 if (VOID_TYPE_P (TREE_TYPE (exp)))
5399 /* C++ can generate ?: expressions with a throw expression in one
5400 branch and an rvalue in the other. Here, we resolve attempts to
5401 store the throw expression's nonexistent result. */
5402 gcc_assert (!call_param_p);
5403 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5404 return NULL_RTX;
5406 if (TREE_CODE (exp) == COMPOUND_EXPR)
5408 /* Perform first part of compound expression, then assign from second
5409 part. */
5410 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5411 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5412 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5413 call_param_p, nontemporal, reverse,
5414 btarget);
5416 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5418 /* For conditional expression, get safe form of the target. Then
5419 test the condition, doing the appropriate assignment on either
5420 side. This avoids the creation of unnecessary temporaries.
5421 For non-BLKmode, it is more efficient not to do this. */
5423 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5425 do_pending_stack_adjust ();
5426 NO_DEFER_POP;
5427 jumpifnot (TREE_OPERAND (exp, 0), lab1,
5428 profile_probability::uninitialized ());
5429 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5430 nontemporal, reverse, btarget);
5431 emit_jump_insn (targetm.gen_jump (lab2));
5432 emit_barrier ();
5433 emit_label (lab1);
5434 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5435 nontemporal, reverse, btarget);
5436 emit_label (lab2);
5437 OK_DEFER_POP;
5439 return NULL_RTX;
5441 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5442 /* If this is a scalar in a register that is stored in a wider mode
5443 than the declared mode, compute the result into its declared mode
5444 and then convert to the wider mode. Our value is the computed
5445 expression. */
5447 rtx inner_target = 0;
5448 scalar_int_mode outer_mode = subreg_unpromoted_mode (target);
5449 scalar_int_mode inner_mode = subreg_promoted_mode (target);
5451 /* We can do the conversion inside EXP, which will often result
5452 in some optimizations. Do the conversion in two steps: first
5453 change the signedness, if needed, then the extend. But don't
5454 do this if the type of EXP is a subtype of something else
5455 since then the conversion might involve more than just
5456 converting modes. */
5457 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5458 && TREE_TYPE (TREE_TYPE (exp)) == 0
5459 && GET_MODE_PRECISION (outer_mode)
5460 == TYPE_PRECISION (TREE_TYPE (exp)))
5462 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5463 TYPE_UNSIGNED (TREE_TYPE (exp))))
5465 /* Some types, e.g. Fortran's logical*4, won't have a signed
5466 version, so use the mode instead. */
5467 tree ntype
5468 = (signed_or_unsigned_type_for
5469 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5470 if (ntype == NULL)
5471 ntype = lang_hooks.types.type_for_mode
5472 (TYPE_MODE (TREE_TYPE (exp)),
5473 SUBREG_PROMOTED_SIGN (target));
5475 exp = fold_convert_loc (loc, ntype, exp);
5478 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5479 (inner_mode, SUBREG_PROMOTED_SIGN (target)),
5480 exp);
5482 inner_target = SUBREG_REG (target);
5485 temp = expand_expr (exp, inner_target, VOIDmode,
5486 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5488 /* Handle bounds returned by call. */
5489 if (TREE_CODE (exp) == CALL_EXPR)
5491 rtx bounds;
5492 chkp_split_slot (temp, &temp, &bounds);
5493 if (bounds && btarget)
5495 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5496 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5497 chkp_set_rtl_bounds (btarget, tmp);
5501 /* If TEMP is a VOIDmode constant, use convert_modes to make
5502 sure that we properly convert it. */
5503 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5505 temp = convert_modes (outer_mode, TYPE_MODE (TREE_TYPE (exp)),
5506 temp, SUBREG_PROMOTED_SIGN (target));
5507 temp = convert_modes (inner_mode, outer_mode, temp,
5508 SUBREG_PROMOTED_SIGN (target));
5511 convert_move (SUBREG_REG (target), temp,
5512 SUBREG_PROMOTED_SIGN (target));
5514 return NULL_RTX;
5516 else if ((TREE_CODE (exp) == STRING_CST
5517 || (TREE_CODE (exp) == MEM_REF
5518 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5519 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5520 == STRING_CST
5521 && integer_zerop (TREE_OPERAND (exp, 1))))
5522 && !nontemporal && !call_param_p
5523 && MEM_P (target))
5525 /* Optimize initialization of an array with a STRING_CST. */
5526 HOST_WIDE_INT exp_len, str_copy_len;
5527 rtx dest_mem;
5528 tree str = TREE_CODE (exp) == STRING_CST
5529 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5531 exp_len = int_expr_size (exp);
5532 if (exp_len <= 0)
5533 goto normal_expr;
5535 if (TREE_STRING_LENGTH (str) <= 0)
5536 goto normal_expr;
5538 str_copy_len = strlen (TREE_STRING_POINTER (str));
5539 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5540 goto normal_expr;
5542 str_copy_len = TREE_STRING_LENGTH (str);
5543 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5544 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5546 str_copy_len += STORE_MAX_PIECES - 1;
5547 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5549 str_copy_len = MIN (str_copy_len, exp_len);
5550 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5551 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5552 MEM_ALIGN (target), false))
5553 goto normal_expr;
5555 dest_mem = target;
5557 dest_mem = store_by_pieces (dest_mem,
5558 str_copy_len, builtin_strncpy_read_str,
5559 CONST_CAST (char *,
5560 TREE_STRING_POINTER (str)),
5561 MEM_ALIGN (target), false,
5562 exp_len > str_copy_len ? 1 : 0);
5563 if (exp_len > str_copy_len)
5564 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5565 GEN_INT (exp_len - str_copy_len),
5566 BLOCK_OP_NORMAL);
5567 return NULL_RTX;
5569 else
5571 rtx tmp_target;
5573 normal_expr:
5574 /* If we want to use a nontemporal or a reverse order store, force the
5575 value into a register first. */
5576 tmp_target = nontemporal || reverse ? NULL_RTX : target;
5577 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5578 (call_param_p
5579 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5580 &alt_rtl, false);
5582 /* Handle bounds returned by call. */
5583 if (TREE_CODE (exp) == CALL_EXPR)
5585 rtx bounds;
5586 chkp_split_slot (temp, &temp, &bounds);
5587 if (bounds && btarget)
5589 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5590 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5591 chkp_set_rtl_bounds (btarget, tmp);
5596 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5597 the same as that of TARGET, adjust the constant. This is needed, for
5598 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5599 only a word-sized value. */
5600 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5601 && TREE_CODE (exp) != ERROR_MARK
5602 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5603 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5604 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5606 /* If value was not generated in the target, store it there.
5607 Convert the value to TARGET's type first if necessary and emit the
5608 pending incrementations that have been queued when expanding EXP.
5609 Note that we cannot emit the whole queue blindly because this will
5610 effectively disable the POST_INC optimization later.
5612 If TEMP and TARGET compare equal according to rtx_equal_p, but
5613 one or both of them are volatile memory refs, we have to distinguish
5614 two cases:
5615 - expand_expr has used TARGET. In this case, we must not generate
5616 another copy. This can be detected by TARGET being equal according
5617 to == .
5618 - expand_expr has not used TARGET - that means that the source just
5619 happens to have the same RTX form. Since temp will have been created
5620 by expand_expr, it will compare unequal according to == .
5621 We must generate a copy in this case, to reach the correct number
5622 of volatile memory references. */
5624 if ((! rtx_equal_p (temp, target)
5625 || (temp != target && (side_effects_p (temp)
5626 || side_effects_p (target))))
5627 && TREE_CODE (exp) != ERROR_MARK
5628 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5629 but TARGET is not valid memory reference, TEMP will differ
5630 from TARGET although it is really the same location. */
5631 && !(alt_rtl
5632 && rtx_equal_p (alt_rtl, target)
5633 && !side_effects_p (alt_rtl)
5634 && !side_effects_p (target))
5635 /* If there's nothing to copy, don't bother. Don't call
5636 expr_size unless necessary, because some front-ends (C++)
5637 expr_size-hook must not be given objects that are not
5638 supposed to be bit-copied or bit-initialized. */
5639 && expr_size (exp) != const0_rtx)
5641 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5643 if (GET_MODE (target) == BLKmode)
5645 /* Handle calls that return BLKmode values in registers. */
5646 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5647 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5648 else
5649 store_bit_field (target,
5650 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5651 0, 0, 0, GET_MODE (temp), temp, reverse);
5653 else
5654 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5657 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5659 /* Handle copying a string constant into an array. The string
5660 constant may be shorter than the array. So copy just the string's
5661 actual length, and clear the rest. First get the size of the data
5662 type of the string, which is actually the size of the target. */
5663 rtx size = expr_size (exp);
5665 if (CONST_INT_P (size)
5666 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5667 emit_block_move (target, temp, size,
5668 (call_param_p
5669 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5670 else
5672 machine_mode pointer_mode
5673 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5674 machine_mode address_mode = get_address_mode (target);
5676 /* Compute the size of the data to copy from the string. */
5677 tree copy_size
5678 = size_binop_loc (loc, MIN_EXPR,
5679 make_tree (sizetype, size),
5680 size_int (TREE_STRING_LENGTH (exp)));
5681 rtx copy_size_rtx
5682 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5683 (call_param_p
5684 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5685 rtx_code_label *label = 0;
5687 /* Copy that much. */
5688 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5689 TYPE_UNSIGNED (sizetype));
5690 emit_block_move (target, temp, copy_size_rtx,
5691 (call_param_p
5692 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5694 /* Figure out how much is left in TARGET that we have to clear.
5695 Do all calculations in pointer_mode. */
5696 if (CONST_INT_P (copy_size_rtx))
5698 size = plus_constant (address_mode, size,
5699 -INTVAL (copy_size_rtx));
5700 target = adjust_address (target, BLKmode,
5701 INTVAL (copy_size_rtx));
5703 else
5705 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5706 copy_size_rtx, NULL_RTX, 0,
5707 OPTAB_LIB_WIDEN);
5709 if (GET_MODE (copy_size_rtx) != address_mode)
5710 copy_size_rtx = convert_to_mode (address_mode,
5711 copy_size_rtx,
5712 TYPE_UNSIGNED (sizetype));
5714 target = offset_address (target, copy_size_rtx,
5715 highest_pow2_factor (copy_size));
5716 label = gen_label_rtx ();
5717 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5718 GET_MODE (size), 0, label);
5721 if (size != const0_rtx)
5722 clear_storage (target, size, BLOCK_OP_NORMAL);
5724 if (label)
5725 emit_label (label);
5728 /* Handle calls that return values in multiple non-contiguous locations.
5729 The Irix 6 ABI has examples of this. */
5730 else if (GET_CODE (target) == PARALLEL)
5732 if (GET_CODE (temp) == PARALLEL)
5733 emit_group_move (target, temp);
5734 else
5735 emit_group_load (target, temp, TREE_TYPE (exp),
5736 int_size_in_bytes (TREE_TYPE (exp)));
5738 else if (GET_CODE (temp) == PARALLEL)
5739 emit_group_store (target, temp, TREE_TYPE (exp),
5740 int_size_in_bytes (TREE_TYPE (exp)));
5741 else if (GET_MODE (temp) == BLKmode)
5742 emit_block_move (target, temp, expr_size (exp),
5743 (call_param_p
5744 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5745 /* If we emit a nontemporal store, there is nothing else to do. */
5746 else if (nontemporal && emit_storent_insn (target, temp))
5748 else
5750 if (reverse)
5751 temp = flip_storage_order (GET_MODE (target), temp);
5752 temp = force_operand (temp, target);
5753 if (temp != target)
5754 emit_move_insn (target, temp);
5758 return NULL_RTX;
5761 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5763 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
5764 bool reverse)
5766 return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
5767 reverse, NULL);
5770 /* Return true if field F of structure TYPE is a flexible array. */
5772 static bool
5773 flexible_array_member_p (const_tree f, const_tree type)
5775 const_tree tf;
5777 tf = TREE_TYPE (f);
5778 return (DECL_CHAIN (f) == NULL
5779 && TREE_CODE (tf) == ARRAY_TYPE
5780 && TYPE_DOMAIN (tf)
5781 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5782 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5783 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5784 && int_size_in_bytes (type) >= 0);
5787 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5788 must have in order for it to completely initialize a value of type TYPE.
5789 Return -1 if the number isn't known.
5791 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5793 static HOST_WIDE_INT
5794 count_type_elements (const_tree type, bool for_ctor_p)
5796 switch (TREE_CODE (type))
5798 case ARRAY_TYPE:
5800 tree nelts;
5802 nelts = array_type_nelts (type);
5803 if (nelts && tree_fits_uhwi_p (nelts))
5805 unsigned HOST_WIDE_INT n;
5807 n = tree_to_uhwi (nelts) + 1;
5808 if (n == 0 || for_ctor_p)
5809 return n;
5810 else
5811 return n * count_type_elements (TREE_TYPE (type), false);
5813 return for_ctor_p ? -1 : 1;
5816 case RECORD_TYPE:
5818 unsigned HOST_WIDE_INT n;
5819 tree f;
5821 n = 0;
5822 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5823 if (TREE_CODE (f) == FIELD_DECL)
5825 if (!for_ctor_p)
5826 n += count_type_elements (TREE_TYPE (f), false);
5827 else if (!flexible_array_member_p (f, type))
5828 /* Don't count flexible arrays, which are not supposed
5829 to be initialized. */
5830 n += 1;
5833 return n;
5836 case UNION_TYPE:
5837 case QUAL_UNION_TYPE:
5839 tree f;
5840 HOST_WIDE_INT n, m;
5842 gcc_assert (!for_ctor_p);
5843 /* Estimate the number of scalars in each field and pick the
5844 maximum. Other estimates would do instead; the idea is simply
5845 to make sure that the estimate is not sensitive to the ordering
5846 of the fields. */
5847 n = 1;
5848 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5849 if (TREE_CODE (f) == FIELD_DECL)
5851 m = count_type_elements (TREE_TYPE (f), false);
5852 /* If the field doesn't span the whole union, add an extra
5853 scalar for the rest. */
5854 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5855 TYPE_SIZE (type)) != 1)
5856 m++;
5857 if (n < m)
5858 n = m;
5860 return n;
5863 case COMPLEX_TYPE:
5864 return 2;
5866 case VECTOR_TYPE:
5867 return TYPE_VECTOR_SUBPARTS (type);
5869 case INTEGER_TYPE:
5870 case REAL_TYPE:
5871 case FIXED_POINT_TYPE:
5872 case ENUMERAL_TYPE:
5873 case BOOLEAN_TYPE:
5874 case POINTER_TYPE:
5875 case OFFSET_TYPE:
5876 case REFERENCE_TYPE:
5877 case NULLPTR_TYPE:
5878 return 1;
5880 case ERROR_MARK:
5881 return 0;
5883 case VOID_TYPE:
5884 case METHOD_TYPE:
5885 case FUNCTION_TYPE:
5886 case LANG_TYPE:
5887 default:
5888 gcc_unreachable ();
5892 /* Helper for categorize_ctor_elements. Identical interface. */
5894 static bool
5895 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5896 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5898 unsigned HOST_WIDE_INT idx;
5899 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5900 tree value, purpose, elt_type;
5902 /* Whether CTOR is a valid constant initializer, in accordance with what
5903 initializer_constant_valid_p does. If inferred from the constructor
5904 elements, true until proven otherwise. */
5905 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5906 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5908 nz_elts = 0;
5909 init_elts = 0;
5910 num_fields = 0;
5911 elt_type = NULL_TREE;
5913 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5915 HOST_WIDE_INT mult = 1;
5917 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5919 tree lo_index = TREE_OPERAND (purpose, 0);
5920 tree hi_index = TREE_OPERAND (purpose, 1);
5922 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5923 mult = (tree_to_uhwi (hi_index)
5924 - tree_to_uhwi (lo_index) + 1);
5926 num_fields += mult;
5927 elt_type = TREE_TYPE (value);
5929 switch (TREE_CODE (value))
5931 case CONSTRUCTOR:
5933 HOST_WIDE_INT nz = 0, ic = 0;
5935 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5936 p_complete);
5938 nz_elts += mult * nz;
5939 init_elts += mult * ic;
5941 if (const_from_elts_p && const_p)
5942 const_p = const_elt_p;
5944 break;
5946 case INTEGER_CST:
5947 case REAL_CST:
5948 case FIXED_CST:
5949 if (!initializer_zerop (value))
5950 nz_elts += mult;
5951 init_elts += mult;
5952 break;
5954 case STRING_CST:
5955 nz_elts += mult * TREE_STRING_LENGTH (value);
5956 init_elts += mult * TREE_STRING_LENGTH (value);
5957 break;
5959 case COMPLEX_CST:
5960 if (!initializer_zerop (TREE_REALPART (value)))
5961 nz_elts += mult;
5962 if (!initializer_zerop (TREE_IMAGPART (value)))
5963 nz_elts += mult;
5964 init_elts += mult;
5965 break;
5967 case VECTOR_CST:
5969 unsigned i;
5970 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5972 tree v = VECTOR_CST_ELT (value, i);
5973 if (!initializer_zerop (v))
5974 nz_elts += mult;
5975 init_elts += mult;
5978 break;
5980 default:
5982 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5983 nz_elts += mult * tc;
5984 init_elts += mult * tc;
5986 if (const_from_elts_p && const_p)
5987 const_p
5988 = initializer_constant_valid_p (value,
5989 elt_type,
5990 TYPE_REVERSE_STORAGE_ORDER
5991 (TREE_TYPE (ctor)))
5992 != NULL_TREE;
5994 break;
5998 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5999 num_fields, elt_type))
6000 *p_complete = false;
6002 *p_nz_elts += nz_elts;
6003 *p_init_elts += init_elts;
6005 return const_p;
6008 /* Examine CTOR to discover:
6009 * how many scalar fields are set to nonzero values,
6010 and place it in *P_NZ_ELTS;
6011 * how many scalar fields in total are in CTOR,
6012 and place it in *P_ELT_COUNT.
6013 * whether the constructor is complete -- in the sense that every
6014 meaningful byte is explicitly given a value --
6015 and place it in *P_COMPLETE.
6017 Return whether or not CTOR is a valid static constant initializer, the same
6018 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
6020 bool
6021 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
6022 HOST_WIDE_INT *p_init_elts, bool *p_complete)
6024 *p_nz_elts = 0;
6025 *p_init_elts = 0;
6026 *p_complete = true;
6028 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
6031 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
6032 of which had type LAST_TYPE. Each element was itself a complete
6033 initializer, in the sense that every meaningful byte was explicitly
6034 given a value. Return true if the same is true for the constructor
6035 as a whole. */
6037 bool
6038 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
6039 const_tree last_type)
6041 if (TREE_CODE (type) == UNION_TYPE
6042 || TREE_CODE (type) == QUAL_UNION_TYPE)
6044 if (num_elts == 0)
6045 return false;
6047 gcc_assert (num_elts == 1 && last_type);
6049 /* ??? We could look at each element of the union, and find the
6050 largest element. Which would avoid comparing the size of the
6051 initialized element against any tail padding in the union.
6052 Doesn't seem worth the effort... */
6053 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
6056 return count_type_elements (type, true) == num_elts;
6059 /* Return 1 if EXP contains mostly (3/4) zeros. */
6061 static int
6062 mostly_zeros_p (const_tree exp)
6064 if (TREE_CODE (exp) == CONSTRUCTOR)
6066 HOST_WIDE_INT nz_elts, init_elts;
6067 bool complete_p;
6069 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
6070 return !complete_p || nz_elts < init_elts / 4;
6073 return initializer_zerop (exp);
6076 /* Return 1 if EXP contains all zeros. */
6078 static int
6079 all_zeros_p (const_tree exp)
6081 if (TREE_CODE (exp) == CONSTRUCTOR)
6083 HOST_WIDE_INT nz_elts, init_elts;
6084 bool complete_p;
6086 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
6087 return nz_elts == 0;
6090 return initializer_zerop (exp);
6093 /* Helper function for store_constructor.
6094 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
6095 CLEARED is as for store_constructor.
6096 ALIAS_SET is the alias set to use for any stores.
6097 If REVERSE is true, the store is to be done in reverse order.
6099 This provides a recursive shortcut back to store_constructor when it isn't
6100 necessary to go through store_field. This is so that we can pass through
6101 the cleared field to let store_constructor know that we may not have to
6102 clear a substructure if the outer structure has already been cleared. */
6104 static void
6105 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
6106 HOST_WIDE_INT bitpos,
6107 unsigned HOST_WIDE_INT bitregion_start,
6108 unsigned HOST_WIDE_INT bitregion_end,
6109 machine_mode mode,
6110 tree exp, int cleared,
6111 alias_set_type alias_set, bool reverse)
6113 if (TREE_CODE (exp) == CONSTRUCTOR
6114 /* We can only call store_constructor recursively if the size and
6115 bit position are on a byte boundary. */
6116 && bitpos % BITS_PER_UNIT == 0
6117 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
6118 /* If we have a nonzero bitpos for a register target, then we just
6119 let store_field do the bitfield handling. This is unlikely to
6120 generate unnecessary clear instructions anyways. */
6121 && (bitpos == 0 || MEM_P (target)))
6123 if (MEM_P (target))
6124 target
6125 = adjust_address (target,
6126 GET_MODE (target) == BLKmode
6127 || (bitpos
6128 % GET_MODE_ALIGNMENT (GET_MODE (target))) != 0
6129 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
6132 /* Update the alias set, if required. */
6133 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
6134 && MEM_ALIAS_SET (target) != 0)
6136 target = copy_rtx (target);
6137 set_mem_alias_set (target, alias_set);
6140 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT,
6141 reverse);
6143 else
6144 store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
6145 exp, alias_set, false, reverse);
6149 /* Returns the number of FIELD_DECLs in TYPE. */
6151 static int
6152 fields_length (const_tree type)
6154 tree t = TYPE_FIELDS (type);
6155 int count = 0;
6157 for (; t; t = DECL_CHAIN (t))
6158 if (TREE_CODE (t) == FIELD_DECL)
6159 ++count;
6161 return count;
6165 /* Store the value of constructor EXP into the rtx TARGET.
6166 TARGET is either a REG or a MEM; we know it cannot conflict, since
6167 safe_from_p has been called.
6168 CLEARED is true if TARGET is known to have been zero'd.
6169 SIZE is the number of bytes of TARGET we are allowed to modify: this
6170 may not be the same as the size of EXP if we are assigning to a field
6171 which has been packed to exclude padding bits.
6172 If REVERSE is true, the store is to be done in reverse order. */
6174 static void
6175 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
6176 bool reverse)
6178 tree type = TREE_TYPE (exp);
6179 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6180 HOST_WIDE_INT bitregion_end = size > 0 ? size * BITS_PER_UNIT - 1 : 0;
6182 switch (TREE_CODE (type))
6184 case RECORD_TYPE:
6185 case UNION_TYPE:
6186 case QUAL_UNION_TYPE:
6188 unsigned HOST_WIDE_INT idx;
6189 tree field, value;
6191 /* The storage order is specified for every aggregate type. */
6192 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6194 /* If size is zero or the target is already cleared, do nothing. */
6195 if (size == 0 || cleared)
6196 cleared = 1;
6197 /* We either clear the aggregate or indicate the value is dead. */
6198 else if ((TREE_CODE (type) == UNION_TYPE
6199 || TREE_CODE (type) == QUAL_UNION_TYPE)
6200 && ! CONSTRUCTOR_ELTS (exp))
6201 /* If the constructor is empty, clear the union. */
6203 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6204 cleared = 1;
6207 /* If we are building a static constructor into a register,
6208 set the initial value as zero so we can fold the value into
6209 a constant. But if more than one register is involved,
6210 this probably loses. */
6211 else if (REG_P (target) && TREE_STATIC (exp)
6212 && (GET_MODE_SIZE (GET_MODE (target))
6213 <= REGMODE_NATURAL_SIZE (GET_MODE (target))))
6215 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6216 cleared = 1;
6219 /* If the constructor has fewer fields than the structure or
6220 if we are initializing the structure to mostly zeros, clear
6221 the whole structure first. Don't do this if TARGET is a
6222 register whose mode size isn't equal to SIZE since
6223 clear_storage can't handle this case. */
6224 else if (size > 0
6225 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
6226 || mostly_zeros_p (exp))
6227 && (!REG_P (target)
6228 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6229 == size)))
6231 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6232 cleared = 1;
6235 if (REG_P (target) && !cleared)
6236 emit_clobber (target);
6238 /* Store each element of the constructor into the
6239 corresponding field of TARGET. */
6240 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6242 machine_mode mode;
6243 HOST_WIDE_INT bitsize;
6244 HOST_WIDE_INT bitpos = 0;
6245 tree offset;
6246 rtx to_rtx = target;
6248 /* Just ignore missing fields. We cleared the whole
6249 structure, above, if any fields are missing. */
6250 if (field == 0)
6251 continue;
6253 if (cleared && initializer_zerop (value))
6254 continue;
6256 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6257 bitsize = tree_to_uhwi (DECL_SIZE (field));
6258 else
6259 gcc_unreachable ();
6261 mode = DECL_MODE (field);
6262 if (DECL_BIT_FIELD (field))
6263 mode = VOIDmode;
6265 offset = DECL_FIELD_OFFSET (field);
6266 if (tree_fits_shwi_p (offset)
6267 && tree_fits_shwi_p (bit_position (field)))
6269 bitpos = int_bit_position (field);
6270 offset = NULL_TREE;
6272 else
6273 gcc_unreachable ();
6275 /* If this initializes a field that is smaller than a
6276 word, at the start of a word, try to widen it to a full
6277 word. This special case allows us to output C++ member
6278 function initializations in a form that the optimizers
6279 can understand. */
6280 if (WORD_REGISTER_OPERATIONS
6281 && REG_P (target)
6282 && bitsize < BITS_PER_WORD
6283 && bitpos % BITS_PER_WORD == 0
6284 && GET_MODE_CLASS (mode) == MODE_INT
6285 && TREE_CODE (value) == INTEGER_CST
6286 && exp_size >= 0
6287 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6289 tree type = TREE_TYPE (value);
6291 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6293 type = lang_hooks.types.type_for_mode
6294 (word_mode, TYPE_UNSIGNED (type));
6295 value = fold_convert (type, value);
6296 /* Make sure the bits beyond the original bitsize are zero
6297 so that we can correctly avoid extra zeroing stores in
6298 later constructor elements. */
6299 tree bitsize_mask
6300 = wide_int_to_tree (type, wi::mask (bitsize, false,
6301 BITS_PER_WORD));
6302 value = fold_build2 (BIT_AND_EXPR, type, value, bitsize_mask);
6305 if (BYTES_BIG_ENDIAN)
6306 value
6307 = fold_build2 (LSHIFT_EXPR, type, value,
6308 build_int_cst (type,
6309 BITS_PER_WORD - bitsize));
6310 bitsize = BITS_PER_WORD;
6311 mode = word_mode;
6314 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6315 && DECL_NONADDRESSABLE_P (field))
6317 to_rtx = copy_rtx (to_rtx);
6318 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6321 store_constructor_field (to_rtx, bitsize, bitpos,
6322 0, bitregion_end, mode,
6323 value, cleared,
6324 get_alias_set (TREE_TYPE (field)),
6325 reverse);
6327 break;
6329 case ARRAY_TYPE:
6331 tree value, index;
6332 unsigned HOST_WIDE_INT i;
6333 int need_to_clear;
6334 tree domain;
6335 tree elttype = TREE_TYPE (type);
6336 int const_bounds_p;
6337 HOST_WIDE_INT minelt = 0;
6338 HOST_WIDE_INT maxelt = 0;
6340 /* The storage order is specified for every aggregate type. */
6341 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6343 domain = TYPE_DOMAIN (type);
6344 const_bounds_p = (TYPE_MIN_VALUE (domain)
6345 && TYPE_MAX_VALUE (domain)
6346 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6347 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6349 /* If we have constant bounds for the range of the type, get them. */
6350 if (const_bounds_p)
6352 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6353 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6356 /* If the constructor has fewer elements than the array, clear
6357 the whole array first. Similarly if this is static
6358 constructor of a non-BLKmode object. */
6359 if (cleared)
6360 need_to_clear = 0;
6361 else if (REG_P (target) && TREE_STATIC (exp))
6362 need_to_clear = 1;
6363 else
6365 unsigned HOST_WIDE_INT idx;
6366 tree index, value;
6367 HOST_WIDE_INT count = 0, zero_count = 0;
6368 need_to_clear = ! const_bounds_p;
6370 /* This loop is a more accurate version of the loop in
6371 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6372 is also needed to check for missing elements. */
6373 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6375 HOST_WIDE_INT this_node_count;
6377 if (need_to_clear)
6378 break;
6380 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6382 tree lo_index = TREE_OPERAND (index, 0);
6383 tree hi_index = TREE_OPERAND (index, 1);
6385 if (! tree_fits_uhwi_p (lo_index)
6386 || ! tree_fits_uhwi_p (hi_index))
6388 need_to_clear = 1;
6389 break;
6392 this_node_count = (tree_to_uhwi (hi_index)
6393 - tree_to_uhwi (lo_index) + 1);
6395 else
6396 this_node_count = 1;
6398 count += this_node_count;
6399 if (mostly_zeros_p (value))
6400 zero_count += this_node_count;
6403 /* Clear the entire array first if there are any missing
6404 elements, or if the incidence of zero elements is >=
6405 75%. */
6406 if (! need_to_clear
6407 && (count < maxelt - minelt + 1
6408 || 4 * zero_count >= 3 * count))
6409 need_to_clear = 1;
6412 if (need_to_clear && size > 0)
6414 if (REG_P (target))
6415 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6416 else
6417 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6418 cleared = 1;
6421 if (!cleared && REG_P (target))
6422 /* Inform later passes that the old value is dead. */
6423 emit_clobber (target);
6425 /* Store each element of the constructor into the
6426 corresponding element of TARGET, determined by counting the
6427 elements. */
6428 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6430 machine_mode mode;
6431 HOST_WIDE_INT bitsize;
6432 HOST_WIDE_INT bitpos;
6433 rtx xtarget = target;
6435 if (cleared && initializer_zerop (value))
6436 continue;
6438 mode = TYPE_MODE (elttype);
6439 if (mode == BLKmode)
6440 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6441 ? tree_to_uhwi (TYPE_SIZE (elttype))
6442 : -1);
6443 else
6444 bitsize = GET_MODE_BITSIZE (mode);
6446 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6448 tree lo_index = TREE_OPERAND (index, 0);
6449 tree hi_index = TREE_OPERAND (index, 1);
6450 rtx index_r, pos_rtx;
6451 HOST_WIDE_INT lo, hi, count;
6452 tree position;
6454 /* If the range is constant and "small", unroll the loop. */
6455 if (const_bounds_p
6456 && tree_fits_shwi_p (lo_index)
6457 && tree_fits_shwi_p (hi_index)
6458 && (lo = tree_to_shwi (lo_index),
6459 hi = tree_to_shwi (hi_index),
6460 count = hi - lo + 1,
6461 (!MEM_P (target)
6462 || count <= 2
6463 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6464 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6465 <= 40 * 8)))))
6467 lo -= minelt; hi -= minelt;
6468 for (; lo <= hi; lo++)
6470 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6472 if (MEM_P (target)
6473 && !MEM_KEEP_ALIAS_SET_P (target)
6474 && TREE_CODE (type) == ARRAY_TYPE
6475 && TYPE_NONALIASED_COMPONENT (type))
6477 target = copy_rtx (target);
6478 MEM_KEEP_ALIAS_SET_P (target) = 1;
6481 store_constructor_field
6482 (target, bitsize, bitpos, 0, bitregion_end,
6483 mode, value, cleared,
6484 get_alias_set (elttype), reverse);
6487 else
6489 rtx_code_label *loop_start = gen_label_rtx ();
6490 rtx_code_label *loop_end = gen_label_rtx ();
6491 tree exit_cond;
6493 expand_normal (hi_index);
6495 index = build_decl (EXPR_LOCATION (exp),
6496 VAR_DECL, NULL_TREE, domain);
6497 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6498 SET_DECL_RTL (index, index_r);
6499 store_expr (lo_index, index_r, 0, false, reverse);
6501 /* Build the head of the loop. */
6502 do_pending_stack_adjust ();
6503 emit_label (loop_start);
6505 /* Assign value to element index. */
6506 position =
6507 fold_convert (ssizetype,
6508 fold_build2 (MINUS_EXPR,
6509 TREE_TYPE (index),
6510 index,
6511 TYPE_MIN_VALUE (domain)));
6513 position =
6514 size_binop (MULT_EXPR, position,
6515 fold_convert (ssizetype,
6516 TYPE_SIZE_UNIT (elttype)));
6518 pos_rtx = expand_normal (position);
6519 xtarget = offset_address (target, pos_rtx,
6520 highest_pow2_factor (position));
6521 xtarget = adjust_address (xtarget, mode, 0);
6522 if (TREE_CODE (value) == CONSTRUCTOR)
6523 store_constructor (value, xtarget, cleared,
6524 bitsize / BITS_PER_UNIT, reverse);
6525 else
6526 store_expr (value, xtarget, 0, false, reverse);
6528 /* Generate a conditional jump to exit the loop. */
6529 exit_cond = build2 (LT_EXPR, integer_type_node,
6530 index, hi_index);
6531 jumpif (exit_cond, loop_end,
6532 profile_probability::uninitialized ());
6534 /* Update the loop counter, and jump to the head of
6535 the loop. */
6536 expand_assignment (index,
6537 build2 (PLUS_EXPR, TREE_TYPE (index),
6538 index, integer_one_node),
6539 false);
6541 emit_jump (loop_start);
6543 /* Build the end of the loop. */
6544 emit_label (loop_end);
6547 else if ((index != 0 && ! tree_fits_shwi_p (index))
6548 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6550 tree position;
6552 if (index == 0)
6553 index = ssize_int (1);
6555 if (minelt)
6556 index = fold_convert (ssizetype,
6557 fold_build2 (MINUS_EXPR,
6558 TREE_TYPE (index),
6559 index,
6560 TYPE_MIN_VALUE (domain)));
6562 position =
6563 size_binop (MULT_EXPR, index,
6564 fold_convert (ssizetype,
6565 TYPE_SIZE_UNIT (elttype)));
6566 xtarget = offset_address (target,
6567 expand_normal (position),
6568 highest_pow2_factor (position));
6569 xtarget = adjust_address (xtarget, mode, 0);
6570 store_expr (value, xtarget, 0, false, reverse);
6572 else
6574 if (index != 0)
6575 bitpos = ((tree_to_shwi (index) - minelt)
6576 * tree_to_uhwi (TYPE_SIZE (elttype)));
6577 else
6578 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6580 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6581 && TREE_CODE (type) == ARRAY_TYPE
6582 && TYPE_NONALIASED_COMPONENT (type))
6584 target = copy_rtx (target);
6585 MEM_KEEP_ALIAS_SET_P (target) = 1;
6587 store_constructor_field (target, bitsize, bitpos, 0,
6588 bitregion_end, mode, value,
6589 cleared, get_alias_set (elttype),
6590 reverse);
6593 break;
6596 case VECTOR_TYPE:
6598 unsigned HOST_WIDE_INT idx;
6599 constructor_elt *ce;
6600 int i;
6601 int need_to_clear;
6602 insn_code icode = CODE_FOR_nothing;
6603 tree elt;
6604 tree elttype = TREE_TYPE (type);
6605 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6606 machine_mode eltmode = TYPE_MODE (elttype);
6607 HOST_WIDE_INT bitsize;
6608 HOST_WIDE_INT bitpos;
6609 rtvec vector = NULL;
6610 unsigned n_elts;
6611 alias_set_type alias;
6612 bool vec_vec_init_p = false;
6613 machine_mode mode = GET_MODE (target);
6615 gcc_assert (eltmode != BLKmode);
6617 /* Try using vec_duplicate_optab for uniform vectors. */
6618 if (!TREE_SIDE_EFFECTS (exp)
6619 && VECTOR_MODE_P (mode)
6620 && eltmode == GET_MODE_INNER (mode)
6621 && ((icode = optab_handler (vec_duplicate_optab, mode))
6622 != CODE_FOR_nothing)
6623 && (elt = uniform_vector_p (exp)))
6625 struct expand_operand ops[2];
6626 create_output_operand (&ops[0], target, mode);
6627 create_input_operand (&ops[1], expand_normal (elt), eltmode);
6628 expand_insn (icode, 2, ops);
6629 if (!rtx_equal_p (target, ops[0].value))
6630 emit_move_insn (target, ops[0].value);
6631 break;
6634 n_elts = TYPE_VECTOR_SUBPARTS (type);
6635 if (REG_P (target) && VECTOR_MODE_P (mode))
6637 machine_mode emode = eltmode;
6639 if (CONSTRUCTOR_NELTS (exp)
6640 && (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
6641 == VECTOR_TYPE))
6643 tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
6644 gcc_assert (CONSTRUCTOR_NELTS (exp) * TYPE_VECTOR_SUBPARTS (etype)
6645 == n_elts);
6646 emode = TYPE_MODE (etype);
6648 icode = convert_optab_handler (vec_init_optab, mode, emode);
6649 if (icode != CODE_FOR_nothing)
6651 unsigned int i, n = n_elts;
6653 if (emode != eltmode)
6655 n = CONSTRUCTOR_NELTS (exp);
6656 vec_vec_init_p = true;
6658 vector = rtvec_alloc (n);
6659 for (i = 0; i < n; i++)
6660 RTVEC_ELT (vector, i) = CONST0_RTX (emode);
6664 /* If the constructor has fewer elements than the vector,
6665 clear the whole array first. Similarly if this is static
6666 constructor of a non-BLKmode object. */
6667 if (cleared)
6668 need_to_clear = 0;
6669 else if (REG_P (target) && TREE_STATIC (exp))
6670 need_to_clear = 1;
6671 else
6673 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6674 tree value;
6676 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6678 tree sz = TYPE_SIZE (TREE_TYPE (value));
6679 int n_elts_here
6680 = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR, sz,
6681 TYPE_SIZE (elttype)));
6683 count += n_elts_here;
6684 if (mostly_zeros_p (value))
6685 zero_count += n_elts_here;
6688 /* Clear the entire vector first if there are any missing elements,
6689 or if the incidence of zero elements is >= 75%. */
6690 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6693 if (need_to_clear && size > 0 && !vector)
6695 if (REG_P (target))
6696 emit_move_insn (target, CONST0_RTX (mode));
6697 else
6698 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6699 cleared = 1;
6702 /* Inform later passes that the old value is dead. */
6703 if (!cleared && !vector && REG_P (target))
6704 emit_move_insn (target, CONST0_RTX (mode));
6706 if (MEM_P (target))
6707 alias = MEM_ALIAS_SET (target);
6708 else
6709 alias = get_alias_set (elttype);
6711 /* Store each element of the constructor into the corresponding
6712 element of TARGET, determined by counting the elements. */
6713 for (idx = 0, i = 0;
6714 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6715 idx++, i += bitsize / elt_size)
6717 HOST_WIDE_INT eltpos;
6718 tree value = ce->value;
6720 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6721 if (cleared && initializer_zerop (value))
6722 continue;
6724 if (ce->index)
6725 eltpos = tree_to_uhwi (ce->index);
6726 else
6727 eltpos = i;
6729 if (vector)
6731 if (vec_vec_init_p)
6733 gcc_assert (ce->index == NULL_TREE);
6734 gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
6735 eltpos = idx;
6737 else
6738 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6739 RTVEC_ELT (vector, eltpos) = expand_normal (value);
6741 else
6743 machine_mode value_mode
6744 = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6745 ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
6746 bitpos = eltpos * elt_size;
6747 store_constructor_field (target, bitsize, bitpos, 0,
6748 bitregion_end, value_mode,
6749 value, cleared, alias, reverse);
6753 if (vector)
6754 emit_insn (GEN_FCN (icode) (target,
6755 gen_rtx_PARALLEL (mode, vector)));
6756 break;
6759 default:
6760 gcc_unreachable ();
6764 /* Store the value of EXP (an expression tree)
6765 into a subfield of TARGET which has mode MODE and occupies
6766 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6767 If MODE is VOIDmode, it means that we are storing into a bit-field.
6769 BITREGION_START is bitpos of the first bitfield in this region.
6770 BITREGION_END is the bitpos of the ending bitfield in this region.
6771 These two fields are 0, if the C++ memory model does not apply,
6772 or we are not interested in keeping track of bitfield regions.
6774 Always return const0_rtx unless we have something particular to
6775 return.
6777 ALIAS_SET is the alias set for the destination. This value will
6778 (in general) be different from that for TARGET, since TARGET is a
6779 reference to the containing structure.
6781 If NONTEMPORAL is true, try generating a nontemporal store.
6783 If REVERSE is true, the store is to be done in reverse order. */
6785 static rtx
6786 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6787 unsigned HOST_WIDE_INT bitregion_start,
6788 unsigned HOST_WIDE_INT bitregion_end,
6789 machine_mode mode, tree exp,
6790 alias_set_type alias_set, bool nontemporal, bool reverse)
6792 if (TREE_CODE (exp) == ERROR_MARK)
6793 return const0_rtx;
6795 /* If we have nothing to store, do nothing unless the expression has
6796 side-effects. Don't do that for zero sized addressable lhs of
6797 calls. */
6798 if (bitsize == 0
6799 && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6800 || TREE_CODE (exp) != CALL_EXPR))
6801 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6803 if (GET_CODE (target) == CONCAT)
6805 /* We're storing into a struct containing a single __complex. */
6807 gcc_assert (!bitpos);
6808 return store_expr (exp, target, 0, nontemporal, reverse);
6811 /* If the structure is in a register or if the component
6812 is a bit field, we cannot use addressing to access it.
6813 Use bit-field techniques or SUBREG to store in it. */
6815 if (mode == VOIDmode
6816 || (mode != BLKmode && ! direct_store[(int) mode]
6817 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6818 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6819 || REG_P (target)
6820 || GET_CODE (target) == SUBREG
6821 /* If the field isn't aligned enough to store as an ordinary memref,
6822 store it as a bit field. */
6823 || (mode != BLKmode
6824 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6825 || bitpos % GET_MODE_ALIGNMENT (mode))
6826 && targetm.slow_unaligned_access (mode, MEM_ALIGN (target)))
6827 || (bitpos % BITS_PER_UNIT != 0)))
6828 || (bitsize >= 0 && mode != BLKmode
6829 && GET_MODE_BITSIZE (mode) > bitsize)
6830 /* If the RHS and field are a constant size and the size of the
6831 RHS isn't the same size as the bitfield, we must use bitfield
6832 operations. */
6833 || (bitsize >= 0
6834 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6835 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0
6836 /* Except for initialization of full bytes from a CONSTRUCTOR, which
6837 we will handle specially below. */
6838 && !(TREE_CODE (exp) == CONSTRUCTOR
6839 && bitsize % BITS_PER_UNIT == 0)
6840 /* And except for bitwise copying of TREE_ADDRESSABLE types,
6841 where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
6842 includes some extra padding. store_expr / expand_expr will in
6843 that case call get_inner_reference that will have the bitsize
6844 we check here and thus the block move will not clobber the
6845 padding that shouldn't be clobbered. In the future we could
6846 replace the TREE_ADDRESSABLE check with a check that
6847 get_base_address needs to live in memory. */
6848 && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6849 || TREE_CODE (exp) != COMPONENT_REF
6850 || TREE_CODE (DECL_SIZE (TREE_OPERAND (exp, 1))) != INTEGER_CST
6851 || (bitsize % BITS_PER_UNIT != 0)
6852 || (bitpos % BITS_PER_UNIT != 0)
6853 || (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)), bitsize)
6854 != 0)))
6855 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6856 decl we must use bitfield operations. */
6857 || (bitsize >= 0
6858 && TREE_CODE (exp) == MEM_REF
6859 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6860 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6861 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6862 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6864 rtx temp;
6865 gimple *nop_def;
6867 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6868 implies a mask operation. If the precision is the same size as
6869 the field we're storing into, that mask is redundant. This is
6870 particularly common with bit field assignments generated by the
6871 C front end. */
6872 nop_def = get_def_for_expr (exp, NOP_EXPR);
6873 if (nop_def)
6875 tree type = TREE_TYPE (exp);
6876 if (INTEGRAL_TYPE_P (type)
6877 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6878 && bitsize == TYPE_PRECISION (type))
6880 tree op = gimple_assign_rhs1 (nop_def);
6881 type = TREE_TYPE (op);
6882 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6883 exp = op;
6887 temp = expand_normal (exp);
6889 /* Handle calls that return values in multiple non-contiguous locations.
6890 The Irix 6 ABI has examples of this. */
6891 if (GET_CODE (temp) == PARALLEL)
6893 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6894 scalar_int_mode temp_mode
6895 = smallest_int_mode_for_size (size * BITS_PER_UNIT);
6896 rtx temp_target = gen_reg_rtx (temp_mode);
6897 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6898 temp = temp_target;
6901 /* Handle calls that return BLKmode values in registers. */
6902 else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6904 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6905 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6906 temp = temp_target;
6909 /* If the value has aggregate type and an integral mode then, if BITSIZE
6910 is narrower than this mode and this is for big-endian data, we first
6911 need to put the value into the low-order bits for store_bit_field,
6912 except when MODE is BLKmode and BITSIZE larger than the word size
6913 (see the handling of fields larger than a word in store_bit_field).
6914 Moreover, the field may be not aligned on a byte boundary; in this
6915 case, if it has reverse storage order, it needs to be accessed as a
6916 scalar field with reverse storage order and we must first put the
6917 value into target order. */
6918 scalar_int_mode temp_mode;
6919 if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
6920 && is_int_mode (GET_MODE (temp), &temp_mode))
6922 HOST_WIDE_INT size = GET_MODE_BITSIZE (temp_mode);
6924 reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
6926 if (reverse)
6927 temp = flip_storage_order (temp_mode, temp);
6929 if (bitsize < size
6930 && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
6931 && !(mode == BLKmode && bitsize > BITS_PER_WORD))
6932 temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
6933 size - bitsize, NULL_RTX, 1);
6936 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6937 if (mode != VOIDmode && mode != BLKmode
6938 && mode != TYPE_MODE (TREE_TYPE (exp)))
6939 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6941 /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
6942 and BITPOS must be aligned on a byte boundary. If so, we simply do
6943 a block copy. Likewise for a BLKmode-like TARGET. */
6944 if (GET_MODE (temp) == BLKmode
6945 && (GET_MODE (target) == BLKmode
6946 || (MEM_P (target)
6947 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6948 && (bitpos % BITS_PER_UNIT) == 0
6949 && (bitsize % BITS_PER_UNIT) == 0)))
6951 gcc_assert (MEM_P (target) && MEM_P (temp)
6952 && (bitpos % BITS_PER_UNIT) == 0);
6954 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6955 emit_block_move (target, temp,
6956 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6957 / BITS_PER_UNIT),
6958 BLOCK_OP_NORMAL);
6960 return const0_rtx;
6963 /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
6964 word size, we need to load the value (see again store_bit_field). */
6965 if (GET_MODE (temp) == BLKmode && bitsize <= BITS_PER_WORD)
6967 scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize);
6968 temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
6969 temp_mode, false, NULL);
6972 /* Store the value in the bitfield. */
6973 store_bit_field (target, bitsize, bitpos,
6974 bitregion_start, bitregion_end,
6975 mode, temp, reverse);
6977 return const0_rtx;
6979 else
6981 /* Now build a reference to just the desired component. */
6982 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6984 if (to_rtx == target)
6985 to_rtx = copy_rtx (to_rtx);
6987 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6988 set_mem_alias_set (to_rtx, alias_set);
6990 /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
6991 into a target smaller than its type; handle that case now. */
6992 if (TREE_CODE (exp) == CONSTRUCTOR && bitsize >= 0)
6994 gcc_assert (bitsize % BITS_PER_UNIT == 0);
6995 store_constructor (exp, to_rtx, 0, bitsize / BITS_PER_UNIT, reverse);
6996 return to_rtx;
6999 return store_expr (exp, to_rtx, 0, nontemporal, reverse);
7003 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
7004 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
7005 codes and find the ultimate containing object, which we return.
7007 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
7008 bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
7009 storage order of the field.
7010 If the position of the field is variable, we store a tree
7011 giving the variable offset (in units) in *POFFSET.
7012 This offset is in addition to the bit position.
7013 If the position is not variable, we store 0 in *POFFSET.
7015 If any of the extraction expressions is volatile,
7016 we store 1 in *PVOLATILEP. Otherwise we don't change that.
7018 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
7019 Otherwise, it is a mode that can be used to access the field.
7021 If the field describes a variable-sized object, *PMODE is set to
7022 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
7023 this case, but the address of the object can be found. */
7025 tree
7026 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
7027 HOST_WIDE_INT *pbitpos, tree *poffset,
7028 machine_mode *pmode, int *punsignedp,
7029 int *preversep, int *pvolatilep)
7031 tree size_tree = 0;
7032 machine_mode mode = VOIDmode;
7033 bool blkmode_bitfield = false;
7034 tree offset = size_zero_node;
7035 offset_int bit_offset = 0;
7037 /* First get the mode, signedness, storage order and size. We do this from
7038 just the outermost expression. */
7039 *pbitsize = -1;
7040 if (TREE_CODE (exp) == COMPONENT_REF)
7042 tree field = TREE_OPERAND (exp, 1);
7043 size_tree = DECL_SIZE (field);
7044 if (flag_strict_volatile_bitfields > 0
7045 && TREE_THIS_VOLATILE (exp)
7046 && DECL_BIT_FIELD_TYPE (field)
7047 && DECL_MODE (field) != BLKmode)
7048 /* Volatile bitfields should be accessed in the mode of the
7049 field's type, not the mode computed based on the bit
7050 size. */
7051 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
7052 else if (!DECL_BIT_FIELD (field))
7054 mode = DECL_MODE (field);
7055 /* For vector fields re-check the target flags, as DECL_MODE
7056 could have been set with different target flags than
7057 the current function has. */
7058 if (mode == BLKmode
7059 && VECTOR_TYPE_P (TREE_TYPE (field))
7060 && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field))))
7061 mode = TYPE_MODE (TREE_TYPE (field));
7063 else if (DECL_MODE (field) == BLKmode)
7064 blkmode_bitfield = true;
7066 *punsignedp = DECL_UNSIGNED (field);
7068 else if (TREE_CODE (exp) == BIT_FIELD_REF)
7070 size_tree = TREE_OPERAND (exp, 1);
7071 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
7072 || TYPE_UNSIGNED (TREE_TYPE (exp)));
7074 /* For vector types, with the correct size of access, use the mode of
7075 inner type. */
7076 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
7077 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
7078 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
7079 mode = TYPE_MODE (TREE_TYPE (exp));
7081 else
7083 mode = TYPE_MODE (TREE_TYPE (exp));
7084 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
7086 if (mode == BLKmode)
7087 size_tree = TYPE_SIZE (TREE_TYPE (exp));
7088 else
7089 *pbitsize = GET_MODE_BITSIZE (mode);
7092 if (size_tree != 0)
7094 if (! tree_fits_uhwi_p (size_tree))
7095 mode = BLKmode, *pbitsize = -1;
7096 else
7097 *pbitsize = tree_to_uhwi (size_tree);
7100 *preversep = reverse_storage_order_for_component_p (exp);
7102 /* Compute cumulative bit-offset for nested component-refs and array-refs,
7103 and find the ultimate containing object. */
7104 while (1)
7106 switch (TREE_CODE (exp))
7108 case BIT_FIELD_REF:
7109 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
7110 break;
7112 case COMPONENT_REF:
7114 tree field = TREE_OPERAND (exp, 1);
7115 tree this_offset = component_ref_field_offset (exp);
7117 /* If this field hasn't been filled in yet, don't go past it.
7118 This should only happen when folding expressions made during
7119 type construction. */
7120 if (this_offset == 0)
7121 break;
7123 offset = size_binop (PLUS_EXPR, offset, this_offset);
7124 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
7126 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
7128 break;
7130 case ARRAY_REF:
7131 case ARRAY_RANGE_REF:
7133 tree index = TREE_OPERAND (exp, 1);
7134 tree low_bound = array_ref_low_bound (exp);
7135 tree unit_size = array_ref_element_size (exp);
7137 /* We assume all arrays have sizes that are a multiple of a byte.
7138 First subtract the lower bound, if any, in the type of the
7139 index, then convert to sizetype and multiply by the size of
7140 the array element. */
7141 if (! integer_zerop (low_bound))
7142 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
7143 index, low_bound);
7145 offset = size_binop (PLUS_EXPR, offset,
7146 size_binop (MULT_EXPR,
7147 fold_convert (sizetype, index),
7148 unit_size));
7150 break;
7152 case REALPART_EXPR:
7153 break;
7155 case IMAGPART_EXPR:
7156 bit_offset += *pbitsize;
7157 break;
7159 case VIEW_CONVERT_EXPR:
7160 break;
7162 case MEM_REF:
7163 /* Hand back the decl for MEM[&decl, off]. */
7164 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
7166 tree off = TREE_OPERAND (exp, 1);
7167 if (!integer_zerop (off))
7169 offset_int boff, coff = mem_ref_offset (exp);
7170 boff = coff << LOG2_BITS_PER_UNIT;
7171 bit_offset += boff;
7173 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7175 goto done;
7177 default:
7178 goto done;
7181 /* If any reference in the chain is volatile, the effect is volatile. */
7182 if (TREE_THIS_VOLATILE (exp))
7183 *pvolatilep = 1;
7185 exp = TREE_OPERAND (exp, 0);
7187 done:
7189 /* If OFFSET is constant, see if we can return the whole thing as a
7190 constant bit position. Make sure to handle overflow during
7191 this conversion. */
7192 if (TREE_CODE (offset) == INTEGER_CST)
7194 offset_int tem = wi::sext (wi::to_offset (offset),
7195 TYPE_PRECISION (sizetype));
7196 tem <<= LOG2_BITS_PER_UNIT;
7197 tem += bit_offset;
7198 if (wi::fits_shwi_p (tem))
7200 *pbitpos = tem.to_shwi ();
7201 *poffset = offset = NULL_TREE;
7205 /* Otherwise, split it up. */
7206 if (offset)
7208 /* Avoid returning a negative bitpos as this may wreak havoc later. */
7209 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
7211 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
7212 offset_int tem = wi::bit_and_not (bit_offset, mask);
7213 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
7214 Subtract it to BIT_OFFSET and add it (scaled) to OFFSET. */
7215 bit_offset -= tem;
7216 tem >>= LOG2_BITS_PER_UNIT;
7217 offset = size_binop (PLUS_EXPR, offset,
7218 wide_int_to_tree (sizetype, tem));
7221 *pbitpos = bit_offset.to_shwi ();
7222 *poffset = offset;
7225 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7226 if (mode == VOIDmode
7227 && blkmode_bitfield
7228 && (*pbitpos % BITS_PER_UNIT) == 0
7229 && (*pbitsize % BITS_PER_UNIT) == 0)
7230 *pmode = BLKmode;
7231 else
7232 *pmode = mode;
7234 return exp;
7237 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7239 static unsigned HOST_WIDE_INT
7240 target_align (const_tree target)
7242 /* We might have a chain of nested references with intermediate misaligning
7243 bitfields components, so need to recurse to find out. */
7245 unsigned HOST_WIDE_INT this_align, outer_align;
7247 switch (TREE_CODE (target))
7249 case BIT_FIELD_REF:
7250 return 1;
7252 case COMPONENT_REF:
7253 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7254 outer_align = target_align (TREE_OPERAND (target, 0));
7255 return MIN (this_align, outer_align);
7257 case ARRAY_REF:
7258 case ARRAY_RANGE_REF:
7259 this_align = TYPE_ALIGN (TREE_TYPE (target));
7260 outer_align = target_align (TREE_OPERAND (target, 0));
7261 return MIN (this_align, outer_align);
7263 CASE_CONVERT:
7264 case NON_LVALUE_EXPR:
7265 case VIEW_CONVERT_EXPR:
7266 this_align = TYPE_ALIGN (TREE_TYPE (target));
7267 outer_align = target_align (TREE_OPERAND (target, 0));
7268 return MAX (this_align, outer_align);
7270 default:
7271 return TYPE_ALIGN (TREE_TYPE (target));
7276 /* Given an rtx VALUE that may contain additions and multiplications, return
7277 an equivalent value that just refers to a register, memory, or constant.
7278 This is done by generating instructions to perform the arithmetic and
7279 returning a pseudo-register containing the value.
7281 The returned value may be a REG, SUBREG, MEM or constant. */
7284 force_operand (rtx value, rtx target)
7286 rtx op1, op2;
7287 /* Use subtarget as the target for operand 0 of a binary operation. */
7288 rtx subtarget = get_subtarget (target);
7289 enum rtx_code code = GET_CODE (value);
7291 /* Check for subreg applied to an expression produced by loop optimizer. */
7292 if (code == SUBREG
7293 && !REG_P (SUBREG_REG (value))
7294 && !MEM_P (SUBREG_REG (value)))
7296 value
7297 = simplify_gen_subreg (GET_MODE (value),
7298 force_reg (GET_MODE (SUBREG_REG (value)),
7299 force_operand (SUBREG_REG (value),
7300 NULL_RTX)),
7301 GET_MODE (SUBREG_REG (value)),
7302 SUBREG_BYTE (value));
7303 code = GET_CODE (value);
7306 /* Check for a PIC address load. */
7307 if ((code == PLUS || code == MINUS)
7308 && XEXP (value, 0) == pic_offset_table_rtx
7309 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7310 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7311 || GET_CODE (XEXP (value, 1)) == CONST))
7313 if (!subtarget)
7314 subtarget = gen_reg_rtx (GET_MODE (value));
7315 emit_move_insn (subtarget, value);
7316 return subtarget;
7319 if (ARITHMETIC_P (value))
7321 op2 = XEXP (value, 1);
7322 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7323 subtarget = 0;
7324 if (code == MINUS && CONST_INT_P (op2))
7326 code = PLUS;
7327 op2 = negate_rtx (GET_MODE (value), op2);
7330 /* Check for an addition with OP2 a constant integer and our first
7331 operand a PLUS of a virtual register and something else. In that
7332 case, we want to emit the sum of the virtual register and the
7333 constant first and then add the other value. This allows virtual
7334 register instantiation to simply modify the constant rather than
7335 creating another one around this addition. */
7336 if (code == PLUS && CONST_INT_P (op2)
7337 && GET_CODE (XEXP (value, 0)) == PLUS
7338 && REG_P (XEXP (XEXP (value, 0), 0))
7339 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7340 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7342 rtx temp = expand_simple_binop (GET_MODE (value), code,
7343 XEXP (XEXP (value, 0), 0), op2,
7344 subtarget, 0, OPTAB_LIB_WIDEN);
7345 return expand_simple_binop (GET_MODE (value), code, temp,
7346 force_operand (XEXP (XEXP (value,
7347 0), 1), 0),
7348 target, 0, OPTAB_LIB_WIDEN);
7351 op1 = force_operand (XEXP (value, 0), subtarget);
7352 op2 = force_operand (op2, NULL_RTX);
7353 switch (code)
7355 case MULT:
7356 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7357 case DIV:
7358 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7359 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7360 target, 1, OPTAB_LIB_WIDEN);
7361 else
7362 return expand_divmod (0,
7363 FLOAT_MODE_P (GET_MODE (value))
7364 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7365 GET_MODE (value), op1, op2, target, 0);
7366 case MOD:
7367 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7368 target, 0);
7369 case UDIV:
7370 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7371 target, 1);
7372 case UMOD:
7373 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7374 target, 1);
7375 case ASHIFTRT:
7376 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7377 target, 0, OPTAB_LIB_WIDEN);
7378 default:
7379 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7380 target, 1, OPTAB_LIB_WIDEN);
7383 if (UNARY_P (value))
7385 if (!target)
7386 target = gen_reg_rtx (GET_MODE (value));
7387 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7388 switch (code)
7390 case ZERO_EXTEND:
7391 case SIGN_EXTEND:
7392 case TRUNCATE:
7393 case FLOAT_EXTEND:
7394 case FLOAT_TRUNCATE:
7395 convert_move (target, op1, code == ZERO_EXTEND);
7396 return target;
7398 case FIX:
7399 case UNSIGNED_FIX:
7400 expand_fix (target, op1, code == UNSIGNED_FIX);
7401 return target;
7403 case FLOAT:
7404 case UNSIGNED_FLOAT:
7405 expand_float (target, op1, code == UNSIGNED_FLOAT);
7406 return target;
7408 default:
7409 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7413 #ifdef INSN_SCHEDULING
7414 /* On machines that have insn scheduling, we want all memory reference to be
7415 explicit, so we need to deal with such paradoxical SUBREGs. */
7416 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7417 value
7418 = simplify_gen_subreg (GET_MODE (value),
7419 force_reg (GET_MODE (SUBREG_REG (value)),
7420 force_operand (SUBREG_REG (value),
7421 NULL_RTX)),
7422 GET_MODE (SUBREG_REG (value)),
7423 SUBREG_BYTE (value));
7424 #endif
7426 return value;
7429 /* Subroutine of expand_expr: return nonzero iff there is no way that
7430 EXP can reference X, which is being modified. TOP_P is nonzero if this
7431 call is going to be used to determine whether we need a temporary
7432 for EXP, as opposed to a recursive call to this function.
7434 It is always safe for this routine to return zero since it merely
7435 searches for optimization opportunities. */
7438 safe_from_p (const_rtx x, tree exp, int top_p)
7440 rtx exp_rtl = 0;
7441 int i, nops;
7443 if (x == 0
7444 /* If EXP has varying size, we MUST use a target since we currently
7445 have no way of allocating temporaries of variable size
7446 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7447 So we assume here that something at a higher level has prevented a
7448 clash. This is somewhat bogus, but the best we can do. Only
7449 do this when X is BLKmode and when we are at the top level. */
7450 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7451 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7452 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7453 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7454 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7455 != INTEGER_CST)
7456 && GET_MODE (x) == BLKmode)
7457 /* If X is in the outgoing argument area, it is always safe. */
7458 || (MEM_P (x)
7459 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7460 || (GET_CODE (XEXP (x, 0)) == PLUS
7461 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7462 return 1;
7464 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7465 find the underlying pseudo. */
7466 if (GET_CODE (x) == SUBREG)
7468 x = SUBREG_REG (x);
7469 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7470 return 0;
7473 /* Now look at our tree code and possibly recurse. */
7474 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7476 case tcc_declaration:
7477 exp_rtl = DECL_RTL_IF_SET (exp);
7478 break;
7480 case tcc_constant:
7481 return 1;
7483 case tcc_exceptional:
7484 if (TREE_CODE (exp) == TREE_LIST)
7486 while (1)
7488 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7489 return 0;
7490 exp = TREE_CHAIN (exp);
7491 if (!exp)
7492 return 1;
7493 if (TREE_CODE (exp) != TREE_LIST)
7494 return safe_from_p (x, exp, 0);
7497 else if (TREE_CODE (exp) == CONSTRUCTOR)
7499 constructor_elt *ce;
7500 unsigned HOST_WIDE_INT idx;
7502 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7503 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7504 || !safe_from_p (x, ce->value, 0))
7505 return 0;
7506 return 1;
7508 else if (TREE_CODE (exp) == ERROR_MARK)
7509 return 1; /* An already-visited SAVE_EXPR? */
7510 else
7511 return 0;
7513 case tcc_statement:
7514 /* The only case we look at here is the DECL_INITIAL inside a
7515 DECL_EXPR. */
7516 return (TREE_CODE (exp) != DECL_EXPR
7517 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7518 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7519 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7521 case tcc_binary:
7522 case tcc_comparison:
7523 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7524 return 0;
7525 /* Fall through. */
7527 case tcc_unary:
7528 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7530 case tcc_expression:
7531 case tcc_reference:
7532 case tcc_vl_exp:
7533 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7534 the expression. If it is set, we conflict iff we are that rtx or
7535 both are in memory. Otherwise, we check all operands of the
7536 expression recursively. */
7538 switch (TREE_CODE (exp))
7540 case ADDR_EXPR:
7541 /* If the operand is static or we are static, we can't conflict.
7542 Likewise if we don't conflict with the operand at all. */
7543 if (staticp (TREE_OPERAND (exp, 0))
7544 || TREE_STATIC (exp)
7545 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7546 return 1;
7548 /* Otherwise, the only way this can conflict is if we are taking
7549 the address of a DECL a that address if part of X, which is
7550 very rare. */
7551 exp = TREE_OPERAND (exp, 0);
7552 if (DECL_P (exp))
7554 if (!DECL_RTL_SET_P (exp)
7555 || !MEM_P (DECL_RTL (exp)))
7556 return 0;
7557 else
7558 exp_rtl = XEXP (DECL_RTL (exp), 0);
7560 break;
7562 case MEM_REF:
7563 if (MEM_P (x)
7564 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7565 get_alias_set (exp)))
7566 return 0;
7567 break;
7569 case CALL_EXPR:
7570 /* Assume that the call will clobber all hard registers and
7571 all of memory. */
7572 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7573 || MEM_P (x))
7574 return 0;
7575 break;
7577 case WITH_CLEANUP_EXPR:
7578 case CLEANUP_POINT_EXPR:
7579 /* Lowered by gimplify.c. */
7580 gcc_unreachable ();
7582 case SAVE_EXPR:
7583 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7585 default:
7586 break;
7589 /* If we have an rtx, we do not need to scan our operands. */
7590 if (exp_rtl)
7591 break;
7593 nops = TREE_OPERAND_LENGTH (exp);
7594 for (i = 0; i < nops; i++)
7595 if (TREE_OPERAND (exp, i) != 0
7596 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7597 return 0;
7599 break;
7601 case tcc_type:
7602 /* Should never get a type here. */
7603 gcc_unreachable ();
7606 /* If we have an rtl, find any enclosed object. Then see if we conflict
7607 with it. */
7608 if (exp_rtl)
7610 if (GET_CODE (exp_rtl) == SUBREG)
7612 exp_rtl = SUBREG_REG (exp_rtl);
7613 if (REG_P (exp_rtl)
7614 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7615 return 0;
7618 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7619 are memory and they conflict. */
7620 return ! (rtx_equal_p (x, exp_rtl)
7621 || (MEM_P (x) && MEM_P (exp_rtl)
7622 && true_dependence (exp_rtl, VOIDmode, x)));
7625 /* If we reach here, it is safe. */
7626 return 1;
7630 /* Return the highest power of two that EXP is known to be a multiple of.
7631 This is used in updating alignment of MEMs in array references. */
7633 unsigned HOST_WIDE_INT
7634 highest_pow2_factor (const_tree exp)
7636 unsigned HOST_WIDE_INT ret;
7637 int trailing_zeros = tree_ctz (exp);
7638 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7639 return BIGGEST_ALIGNMENT;
7640 ret = HOST_WIDE_INT_1U << trailing_zeros;
7641 if (ret > BIGGEST_ALIGNMENT)
7642 return BIGGEST_ALIGNMENT;
7643 return ret;
7646 /* Similar, except that the alignment requirements of TARGET are
7647 taken into account. Assume it is at least as aligned as its
7648 type, unless it is a COMPONENT_REF in which case the layout of
7649 the structure gives the alignment. */
7651 static unsigned HOST_WIDE_INT
7652 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7654 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7655 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7657 return MAX (factor, talign);
7660 /* Convert the tree comparison code TCODE to the rtl one where the
7661 signedness is UNSIGNEDP. */
7663 static enum rtx_code
7664 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7666 enum rtx_code code;
7667 switch (tcode)
7669 case EQ_EXPR:
7670 code = EQ;
7671 break;
7672 case NE_EXPR:
7673 code = NE;
7674 break;
7675 case LT_EXPR:
7676 code = unsignedp ? LTU : LT;
7677 break;
7678 case LE_EXPR:
7679 code = unsignedp ? LEU : LE;
7680 break;
7681 case GT_EXPR:
7682 code = unsignedp ? GTU : GT;
7683 break;
7684 case GE_EXPR:
7685 code = unsignedp ? GEU : GE;
7686 break;
7687 case UNORDERED_EXPR:
7688 code = UNORDERED;
7689 break;
7690 case ORDERED_EXPR:
7691 code = ORDERED;
7692 break;
7693 case UNLT_EXPR:
7694 code = UNLT;
7695 break;
7696 case UNLE_EXPR:
7697 code = UNLE;
7698 break;
7699 case UNGT_EXPR:
7700 code = UNGT;
7701 break;
7702 case UNGE_EXPR:
7703 code = UNGE;
7704 break;
7705 case UNEQ_EXPR:
7706 code = UNEQ;
7707 break;
7708 case LTGT_EXPR:
7709 code = LTGT;
7710 break;
7712 default:
7713 gcc_unreachable ();
7715 return code;
7718 /* Subroutine of expand_expr. Expand the two operands of a binary
7719 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7720 The value may be stored in TARGET if TARGET is nonzero. The
7721 MODIFIER argument is as documented by expand_expr. */
7723 void
7724 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7725 enum expand_modifier modifier)
7727 if (! safe_from_p (target, exp1, 1))
7728 target = 0;
7729 if (operand_equal_p (exp0, exp1, 0))
7731 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7732 *op1 = copy_rtx (*op0);
7734 else
7736 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7737 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7742 /* Return a MEM that contains constant EXP. DEFER is as for
7743 output_constant_def and MODIFIER is as for expand_expr. */
7745 static rtx
7746 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7748 rtx mem;
7750 mem = output_constant_def (exp, defer);
7751 if (modifier != EXPAND_INITIALIZER)
7752 mem = use_anchored_address (mem);
7753 return mem;
7756 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7757 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7759 static rtx
7760 expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode,
7761 enum expand_modifier modifier, addr_space_t as)
7763 rtx result, subtarget;
7764 tree inner, offset;
7765 HOST_WIDE_INT bitsize, bitpos;
7766 int unsignedp, reversep, volatilep = 0;
7767 machine_mode mode1;
7769 /* If we are taking the address of a constant and are at the top level,
7770 we have to use output_constant_def since we can't call force_const_mem
7771 at top level. */
7772 /* ??? This should be considered a front-end bug. We should not be
7773 generating ADDR_EXPR of something that isn't an LVALUE. The only
7774 exception here is STRING_CST. */
7775 if (CONSTANT_CLASS_P (exp))
7777 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7778 if (modifier < EXPAND_SUM)
7779 result = force_operand (result, target);
7780 return result;
7783 /* Everything must be something allowed by is_gimple_addressable. */
7784 switch (TREE_CODE (exp))
7786 case INDIRECT_REF:
7787 /* This case will happen via recursion for &a->b. */
7788 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7790 case MEM_REF:
7792 tree tem = TREE_OPERAND (exp, 0);
7793 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7794 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7795 return expand_expr (tem, target, tmode, modifier);
7798 case CONST_DECL:
7799 /* Expand the initializer like constants above. */
7800 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7801 0, modifier), 0);
7802 if (modifier < EXPAND_SUM)
7803 result = force_operand (result, target);
7804 return result;
7806 case REALPART_EXPR:
7807 /* The real part of the complex number is always first, therefore
7808 the address is the same as the address of the parent object. */
7809 offset = 0;
7810 bitpos = 0;
7811 inner = TREE_OPERAND (exp, 0);
7812 break;
7814 case IMAGPART_EXPR:
7815 /* The imaginary part of the complex number is always second.
7816 The expression is therefore always offset by the size of the
7817 scalar type. */
7818 offset = 0;
7819 bitpos = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (exp)));
7820 inner = TREE_OPERAND (exp, 0);
7821 break;
7823 case COMPOUND_LITERAL_EXPR:
7824 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7825 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7826 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7827 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7828 the initializers aren't gimplified. */
7829 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7830 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7831 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7832 target, tmode, modifier, as);
7833 /* FALLTHRU */
7834 default:
7835 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7836 expand_expr, as that can have various side effects; LABEL_DECLs for
7837 example, may not have their DECL_RTL set yet. Expand the rtl of
7838 CONSTRUCTORs too, which should yield a memory reference for the
7839 constructor's contents. Assume language specific tree nodes can
7840 be expanded in some interesting way. */
7841 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7842 if (DECL_P (exp)
7843 || TREE_CODE (exp) == CONSTRUCTOR
7844 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7846 result = expand_expr (exp, target, tmode,
7847 modifier == EXPAND_INITIALIZER
7848 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7850 /* If the DECL isn't in memory, then the DECL wasn't properly
7851 marked TREE_ADDRESSABLE, which will be either a front-end
7852 or a tree optimizer bug. */
7854 gcc_assert (MEM_P (result));
7855 result = XEXP (result, 0);
7857 /* ??? Is this needed anymore? */
7858 if (DECL_P (exp))
7859 TREE_USED (exp) = 1;
7861 if (modifier != EXPAND_INITIALIZER
7862 && modifier != EXPAND_CONST_ADDRESS
7863 && modifier != EXPAND_SUM)
7864 result = force_operand (result, target);
7865 return result;
7868 /* Pass FALSE as the last argument to get_inner_reference although
7869 we are expanding to RTL. The rationale is that we know how to
7870 handle "aligning nodes" here: we can just bypass them because
7871 they won't change the final object whose address will be returned
7872 (they actually exist only for that purpose). */
7873 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
7874 &unsignedp, &reversep, &volatilep);
7875 break;
7878 /* We must have made progress. */
7879 gcc_assert (inner != exp);
7881 subtarget = offset || bitpos ? NULL_RTX : target;
7882 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7883 inner alignment, force the inner to be sufficiently aligned. */
7884 if (CONSTANT_CLASS_P (inner)
7885 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7887 inner = copy_node (inner);
7888 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7889 SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
7890 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7892 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7894 if (offset)
7896 rtx tmp;
7898 if (modifier != EXPAND_NORMAL)
7899 result = force_operand (result, NULL);
7900 tmp = expand_expr (offset, NULL_RTX, tmode,
7901 modifier == EXPAND_INITIALIZER
7902 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7904 /* expand_expr is allowed to return an object in a mode other
7905 than TMODE. If it did, we need to convert. */
7906 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7907 tmp = convert_modes (tmode, GET_MODE (tmp),
7908 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7909 result = convert_memory_address_addr_space (tmode, result, as);
7910 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7912 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7913 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7914 else
7916 subtarget = bitpos ? NULL_RTX : target;
7917 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7918 1, OPTAB_LIB_WIDEN);
7922 if (bitpos)
7924 /* Someone beforehand should have rejected taking the address
7925 of such an object. */
7926 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7928 result = convert_memory_address_addr_space (tmode, result, as);
7929 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7930 if (modifier < EXPAND_SUM)
7931 result = force_operand (result, target);
7934 return result;
7937 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7938 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7940 static rtx
7941 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7942 enum expand_modifier modifier)
7944 addr_space_t as = ADDR_SPACE_GENERIC;
7945 scalar_int_mode address_mode = Pmode;
7946 scalar_int_mode pointer_mode = ptr_mode;
7947 machine_mode rmode;
7948 rtx result;
7950 /* Target mode of VOIDmode says "whatever's natural". */
7951 if (tmode == VOIDmode)
7952 tmode = TYPE_MODE (TREE_TYPE (exp));
7954 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7956 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7957 address_mode = targetm.addr_space.address_mode (as);
7958 pointer_mode = targetm.addr_space.pointer_mode (as);
7961 /* We can get called with some Weird Things if the user does silliness
7962 like "(short) &a". In that case, convert_memory_address won't do
7963 the right thing, so ignore the given target mode. */
7964 scalar_int_mode new_tmode = (tmode == pointer_mode
7965 ? pointer_mode
7966 : address_mode);
7968 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7969 new_tmode, modifier, as);
7971 /* Despite expand_expr claims concerning ignoring TMODE when not
7972 strictly convenient, stuff breaks if we don't honor it. Note
7973 that combined with the above, we only do this for pointer modes. */
7974 rmode = GET_MODE (result);
7975 if (rmode == VOIDmode)
7976 rmode = new_tmode;
7977 if (rmode != new_tmode)
7978 result = convert_memory_address_addr_space (new_tmode, result, as);
7980 return result;
7983 /* Generate code for computing CONSTRUCTOR EXP.
7984 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7985 is TRUE, instead of creating a temporary variable in memory
7986 NULL is returned and the caller needs to handle it differently. */
7988 static rtx
7989 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7990 bool avoid_temp_mem)
7992 tree type = TREE_TYPE (exp);
7993 machine_mode mode = TYPE_MODE (type);
7995 /* Try to avoid creating a temporary at all. This is possible
7996 if all of the initializer is zero.
7997 FIXME: try to handle all [0..255] initializers we can handle
7998 with memset. */
7999 if (TREE_STATIC (exp)
8000 && !TREE_ADDRESSABLE (exp)
8001 && target != 0 && mode == BLKmode
8002 && all_zeros_p (exp))
8004 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
8005 return target;
8008 /* All elts simple constants => refer to a constant in memory. But
8009 if this is a non-BLKmode mode, let it store a field at a time
8010 since that should make a CONST_INT, CONST_WIDE_INT or
8011 CONST_DOUBLE when we fold. Likewise, if we have a target we can
8012 use, it is best to store directly into the target unless the type
8013 is large enough that memcpy will be used. If we are making an
8014 initializer and all operands are constant, put it in memory as
8015 well.
8017 FIXME: Avoid trying to fill vector constructors piece-meal.
8018 Output them with output_constant_def below unless we're sure
8019 they're zeros. This should go away when vector initializers
8020 are treated like VECTOR_CST instead of arrays. */
8021 if ((TREE_STATIC (exp)
8022 && ((mode == BLKmode
8023 && ! (target != 0 && safe_from_p (target, exp, 1)))
8024 || TREE_ADDRESSABLE (exp)
8025 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
8026 && (! can_move_by_pieces
8027 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
8028 TYPE_ALIGN (type)))
8029 && ! mostly_zeros_p (exp))))
8030 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
8031 && TREE_CONSTANT (exp)))
8033 rtx constructor;
8035 if (avoid_temp_mem)
8036 return NULL_RTX;
8038 constructor = expand_expr_constant (exp, 1, modifier);
8040 if (modifier != EXPAND_CONST_ADDRESS
8041 && modifier != EXPAND_INITIALIZER
8042 && modifier != EXPAND_SUM)
8043 constructor = validize_mem (constructor);
8045 return constructor;
8048 /* Handle calls that pass values in multiple non-contiguous
8049 locations. The Irix 6 ABI has examples of this. */
8050 if (target == 0 || ! safe_from_p (target, exp, 1)
8051 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
8053 if (avoid_temp_mem)
8054 return NULL_RTX;
8056 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
8059 store_constructor (exp, target, 0, int_expr_size (exp), false);
8060 return target;
8064 /* expand_expr: generate code for computing expression EXP.
8065 An rtx for the computed value is returned. The value is never null.
8066 In the case of a void EXP, const0_rtx is returned.
8068 The value may be stored in TARGET if TARGET is nonzero.
8069 TARGET is just a suggestion; callers must assume that
8070 the rtx returned may not be the same as TARGET.
8072 If TARGET is CONST0_RTX, it means that the value will be ignored.
8074 If TMODE is not VOIDmode, it suggests generating the
8075 result in mode TMODE. But this is done only when convenient.
8076 Otherwise, TMODE is ignored and the value generated in its natural mode.
8077 TMODE is just a suggestion; callers must assume that
8078 the rtx returned may not have mode TMODE.
8080 Note that TARGET may have neither TMODE nor MODE. In that case, it
8081 probably will not be used.
8083 If MODIFIER is EXPAND_SUM then when EXP is an addition
8084 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
8085 or a nest of (PLUS ...) and (MINUS ...) where the terms are
8086 products as above, or REG or MEM, or constant.
8087 Ordinarily in such cases we would output mul or add instructions
8088 and then return a pseudo reg containing the sum.
8090 EXPAND_INITIALIZER is much like EXPAND_SUM except that
8091 it also marks a label as absolutely required (it can't be dead).
8092 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
8093 This is used for outputting expressions used in initializers.
8095 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
8096 with a constant address even if that address is not normally legitimate.
8097 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
8099 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
8100 a call parameter. Such targets require special care as we haven't yet
8101 marked TARGET so that it's safe from being trashed by libcalls. We
8102 don't want to use TARGET for anything but the final result;
8103 Intermediate values must go elsewhere. Additionally, calls to
8104 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
8106 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
8107 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
8108 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
8109 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
8110 recursively.
8112 If INNER_REFERENCE_P is true, we are expanding an inner reference.
8113 In this case, we don't adjust a returned MEM rtx that wouldn't be
8114 sufficiently aligned for its mode; instead, it's up to the caller
8115 to deal with it afterwards. This is used to make sure that unaligned
8116 base objects for which out-of-bounds accesses are supported, for
8117 example record types with trailing arrays, aren't realigned behind
8118 the back of the caller.
8119 The normal operating mode is to pass FALSE for this parameter. */
8122 expand_expr_real (tree exp, rtx target, machine_mode tmode,
8123 enum expand_modifier modifier, rtx *alt_rtl,
8124 bool inner_reference_p)
8126 rtx ret;
8128 /* Handle ERROR_MARK before anybody tries to access its type. */
8129 if (TREE_CODE (exp) == ERROR_MARK
8130 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8132 ret = CONST0_RTX (tmode);
8133 return ret ? ret : const0_rtx;
8136 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8137 inner_reference_p);
8138 return ret;
8141 /* Try to expand the conditional expression which is represented by
8142 TREEOP0 ? TREEOP1 : TREEOP2 using conditonal moves. If it succeeds
8143 return the rtl reg which represents the result. Otherwise return
8144 NULL_RTX. */
8146 static rtx
8147 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8148 tree treeop1 ATTRIBUTE_UNUSED,
8149 tree treeop2 ATTRIBUTE_UNUSED)
8151 rtx insn;
8152 rtx op00, op01, op1, op2;
8153 enum rtx_code comparison_code;
8154 machine_mode comparison_mode;
8155 gimple *srcstmt;
8156 rtx temp;
8157 tree type = TREE_TYPE (treeop1);
8158 int unsignedp = TYPE_UNSIGNED (type);
8159 machine_mode mode = TYPE_MODE (type);
8160 machine_mode orig_mode = mode;
8161 static bool expanding_cond_expr_using_cmove = false;
8163 /* Conditional move expansion can end up TERing two operands which,
8164 when recursively hitting conditional expressions can result in
8165 exponential behavior if the cmove expansion ultimatively fails.
8166 It's hardly profitable to TER a cmove into a cmove so avoid doing
8167 that by failing early if we end up recursing. */
8168 if (expanding_cond_expr_using_cmove)
8169 return NULL_RTX;
8171 /* If we cannot do a conditional move on the mode, try doing it
8172 with the promoted mode. */
8173 if (!can_conditionally_move_p (mode))
8175 mode = promote_mode (type, mode, &unsignedp);
8176 if (!can_conditionally_move_p (mode))
8177 return NULL_RTX;
8178 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
8180 else
8181 temp = assign_temp (type, 0, 1);
8183 expanding_cond_expr_using_cmove = true;
8184 start_sequence ();
8185 expand_operands (treeop1, treeop2,
8186 temp, &op1, &op2, EXPAND_NORMAL);
8188 if (TREE_CODE (treeop0) == SSA_NAME
8189 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8191 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8192 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8193 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8194 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8195 comparison_mode = TYPE_MODE (type);
8196 unsignedp = TYPE_UNSIGNED (type);
8197 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8199 else if (COMPARISON_CLASS_P (treeop0))
8201 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8202 enum tree_code cmpcode = TREE_CODE (treeop0);
8203 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8204 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8205 unsignedp = TYPE_UNSIGNED (type);
8206 comparison_mode = TYPE_MODE (type);
8207 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8209 else
8211 op00 = expand_normal (treeop0);
8212 op01 = const0_rtx;
8213 comparison_code = NE;
8214 comparison_mode = GET_MODE (op00);
8215 if (comparison_mode == VOIDmode)
8216 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8218 expanding_cond_expr_using_cmove = false;
8220 if (GET_MODE (op1) != mode)
8221 op1 = gen_lowpart (mode, op1);
8223 if (GET_MODE (op2) != mode)
8224 op2 = gen_lowpart (mode, op2);
8226 /* Try to emit the conditional move. */
8227 insn = emit_conditional_move (temp, comparison_code,
8228 op00, op01, comparison_mode,
8229 op1, op2, mode,
8230 unsignedp);
8232 /* If we could do the conditional move, emit the sequence,
8233 and return. */
8234 if (insn)
8236 rtx_insn *seq = get_insns ();
8237 end_sequence ();
8238 emit_insn (seq);
8239 return convert_modes (orig_mode, mode, temp, 0);
8242 /* Otherwise discard the sequence and fall back to code with
8243 branches. */
8244 end_sequence ();
8245 return NULL_RTX;
8249 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8250 enum expand_modifier modifier)
8252 rtx op0, op1, op2, temp;
8253 rtx_code_label *lab;
8254 tree type;
8255 int unsignedp;
8256 machine_mode mode;
8257 scalar_int_mode int_mode;
8258 enum tree_code code = ops->code;
8259 optab this_optab;
8260 rtx subtarget, original_target;
8261 int ignore;
8262 bool reduce_bit_field;
8263 location_t loc = ops->location;
8264 tree treeop0, treeop1, treeop2;
8265 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8266 ? reduce_to_bit_field_precision ((expr), \
8267 target, \
8268 type) \
8269 : (expr))
8271 type = ops->type;
8272 mode = TYPE_MODE (type);
8273 unsignedp = TYPE_UNSIGNED (type);
8275 treeop0 = ops->op0;
8276 treeop1 = ops->op1;
8277 treeop2 = ops->op2;
8279 /* We should be called only on simple (binary or unary) expressions,
8280 exactly those that are valid in gimple expressions that aren't
8281 GIMPLE_SINGLE_RHS (or invalid). */
8282 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8283 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8284 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8286 ignore = (target == const0_rtx
8287 || ((CONVERT_EXPR_CODE_P (code)
8288 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8289 && TREE_CODE (type) == VOID_TYPE));
8291 /* We should be called only if we need the result. */
8292 gcc_assert (!ignore);
8294 /* An operation in what may be a bit-field type needs the
8295 result to be reduced to the precision of the bit-field type,
8296 which is narrower than that of the type's mode. */
8297 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8298 && !type_has_mode_precision_p (type));
8300 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8301 target = 0;
8303 /* Use subtarget as the target for operand 0 of a binary operation. */
8304 subtarget = get_subtarget (target);
8305 original_target = target;
8307 switch (code)
8309 case NON_LVALUE_EXPR:
8310 case PAREN_EXPR:
8311 CASE_CONVERT:
8312 if (treeop0 == error_mark_node)
8313 return const0_rtx;
8315 if (TREE_CODE (type) == UNION_TYPE)
8317 tree valtype = TREE_TYPE (treeop0);
8319 /* If both input and output are BLKmode, this conversion isn't doing
8320 anything except possibly changing memory attribute. */
8321 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8323 rtx result = expand_expr (treeop0, target, tmode,
8324 modifier);
8326 result = copy_rtx (result);
8327 set_mem_attributes (result, type, 0);
8328 return result;
8331 if (target == 0)
8333 if (TYPE_MODE (type) != BLKmode)
8334 target = gen_reg_rtx (TYPE_MODE (type));
8335 else
8336 target = assign_temp (type, 1, 1);
8339 if (MEM_P (target))
8340 /* Store data into beginning of memory target. */
8341 store_expr (treeop0,
8342 adjust_address (target, TYPE_MODE (valtype), 0),
8343 modifier == EXPAND_STACK_PARM,
8344 false, TYPE_REVERSE_STORAGE_ORDER (type));
8346 else
8348 gcc_assert (REG_P (target)
8349 && !TYPE_REVERSE_STORAGE_ORDER (type));
8351 /* Store this field into a union of the proper type. */
8352 store_field (target,
8353 MIN ((int_size_in_bytes (TREE_TYPE
8354 (treeop0))
8355 * BITS_PER_UNIT),
8356 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8357 0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8358 false, false);
8361 /* Return the entire union. */
8362 return target;
8365 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8367 op0 = expand_expr (treeop0, target, VOIDmode,
8368 modifier);
8370 /* If the signedness of the conversion differs and OP0 is
8371 a promoted SUBREG, clear that indication since we now
8372 have to do the proper extension. */
8373 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8374 && GET_CODE (op0) == SUBREG)
8375 SUBREG_PROMOTED_VAR_P (op0) = 0;
8377 return REDUCE_BIT_FIELD (op0);
8380 op0 = expand_expr (treeop0, NULL_RTX, mode,
8381 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8382 if (GET_MODE (op0) == mode)
8385 /* If OP0 is a constant, just convert it into the proper mode. */
8386 else if (CONSTANT_P (op0))
8388 tree inner_type = TREE_TYPE (treeop0);
8389 machine_mode inner_mode = GET_MODE (op0);
8391 if (inner_mode == VOIDmode)
8392 inner_mode = TYPE_MODE (inner_type);
8394 if (modifier == EXPAND_INITIALIZER)
8395 op0 = lowpart_subreg (mode, op0, inner_mode);
8396 else
8397 op0= convert_modes (mode, inner_mode, op0,
8398 TYPE_UNSIGNED (inner_type));
8401 else if (modifier == EXPAND_INITIALIZER)
8402 op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8403 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8405 else if (target == 0)
8406 op0 = convert_to_mode (mode, op0,
8407 TYPE_UNSIGNED (TREE_TYPE
8408 (treeop0)));
8409 else
8411 convert_move (target, op0,
8412 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8413 op0 = target;
8416 return REDUCE_BIT_FIELD (op0);
8418 case ADDR_SPACE_CONVERT_EXPR:
8420 tree treeop0_type = TREE_TYPE (treeop0);
8422 gcc_assert (POINTER_TYPE_P (type));
8423 gcc_assert (POINTER_TYPE_P (treeop0_type));
8425 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8426 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8428 /* Conversions between pointers to the same address space should
8429 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8430 gcc_assert (as_to != as_from);
8432 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8434 /* Ask target code to handle conversion between pointers
8435 to overlapping address spaces. */
8436 if (targetm.addr_space.subset_p (as_to, as_from)
8437 || targetm.addr_space.subset_p (as_from, as_to))
8439 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8441 else
8443 /* For disjoint address spaces, converting anything but a null
8444 pointer invokes undefined behavior. We truncate or extend the
8445 value as if we'd converted via integers, which handles 0 as
8446 required, and all others as the programmer likely expects. */
8447 #ifndef POINTERS_EXTEND_UNSIGNED
8448 const int POINTERS_EXTEND_UNSIGNED = 1;
8449 #endif
8450 op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8451 op0, POINTERS_EXTEND_UNSIGNED);
8453 gcc_assert (op0);
8454 return op0;
8457 case POINTER_PLUS_EXPR:
8458 /* Even though the sizetype mode and the pointer's mode can be different
8459 expand is able to handle this correctly and get the correct result out
8460 of the PLUS_EXPR code. */
8461 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8462 if sizetype precision is smaller than pointer precision. */
8463 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8464 treeop1 = fold_convert_loc (loc, type,
8465 fold_convert_loc (loc, ssizetype,
8466 treeop1));
8467 /* If sizetype precision is larger than pointer precision, truncate the
8468 offset to have matching modes. */
8469 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8470 treeop1 = fold_convert_loc (loc, type, treeop1);
8471 /* FALLTHRU */
8473 case PLUS_EXPR:
8474 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8475 something else, make sure we add the register to the constant and
8476 then to the other thing. This case can occur during strength
8477 reduction and doing it this way will produce better code if the
8478 frame pointer or argument pointer is eliminated.
8480 fold-const.c will ensure that the constant is always in the inner
8481 PLUS_EXPR, so the only case we need to do anything about is if
8482 sp, ap, or fp is our second argument, in which case we must swap
8483 the innermost first argument and our second argument. */
8485 if (TREE_CODE (treeop0) == PLUS_EXPR
8486 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8487 && VAR_P (treeop1)
8488 && (DECL_RTL (treeop1) == frame_pointer_rtx
8489 || DECL_RTL (treeop1) == stack_pointer_rtx
8490 || DECL_RTL (treeop1) == arg_pointer_rtx))
8492 gcc_unreachable ();
8495 /* If the result is to be ptr_mode and we are adding an integer to
8496 something, we might be forming a constant. So try to use
8497 plus_constant. If it produces a sum and we can't accept it,
8498 use force_operand. This allows P = &ARR[const] to generate
8499 efficient code on machines where a SYMBOL_REF is not a valid
8500 address.
8502 If this is an EXPAND_SUM call, always return the sum. */
8503 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8504 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8506 if (modifier == EXPAND_STACK_PARM)
8507 target = 0;
8508 if (TREE_CODE (treeop0) == INTEGER_CST
8509 && HWI_COMPUTABLE_MODE_P (mode)
8510 && TREE_CONSTANT (treeop1))
8512 rtx constant_part;
8513 HOST_WIDE_INT wc;
8514 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8516 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8517 EXPAND_SUM);
8518 /* Use wi::shwi to ensure that the constant is
8519 truncated according to the mode of OP1, then sign extended
8520 to a HOST_WIDE_INT. Using the constant directly can result
8521 in non-canonical RTL in a 64x32 cross compile. */
8522 wc = TREE_INT_CST_LOW (treeop0);
8523 constant_part =
8524 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8525 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8526 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8527 op1 = force_operand (op1, target);
8528 return REDUCE_BIT_FIELD (op1);
8531 else if (TREE_CODE (treeop1) == INTEGER_CST
8532 && HWI_COMPUTABLE_MODE_P (mode)
8533 && TREE_CONSTANT (treeop0))
8535 rtx constant_part;
8536 HOST_WIDE_INT wc;
8537 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8539 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8540 (modifier == EXPAND_INITIALIZER
8541 ? EXPAND_INITIALIZER : EXPAND_SUM));
8542 if (! CONSTANT_P (op0))
8544 op1 = expand_expr (treeop1, NULL_RTX,
8545 VOIDmode, modifier);
8546 /* Return a PLUS if modifier says it's OK. */
8547 if (modifier == EXPAND_SUM
8548 || modifier == EXPAND_INITIALIZER)
8549 return simplify_gen_binary (PLUS, mode, op0, op1);
8550 goto binop2;
8552 /* Use wi::shwi to ensure that the constant is
8553 truncated according to the mode of OP1, then sign extended
8554 to a HOST_WIDE_INT. Using the constant directly can result
8555 in non-canonical RTL in a 64x32 cross compile. */
8556 wc = TREE_INT_CST_LOW (treeop1);
8557 constant_part
8558 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8559 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8560 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8561 op0 = force_operand (op0, target);
8562 return REDUCE_BIT_FIELD (op0);
8566 /* Use TER to expand pointer addition of a negated value
8567 as pointer subtraction. */
8568 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8569 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8570 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8571 && TREE_CODE (treeop1) == SSA_NAME
8572 && TYPE_MODE (TREE_TYPE (treeop0))
8573 == TYPE_MODE (TREE_TYPE (treeop1)))
8575 gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8576 if (def)
8578 treeop1 = gimple_assign_rhs1 (def);
8579 code = MINUS_EXPR;
8580 goto do_minus;
8584 /* No sense saving up arithmetic to be done
8585 if it's all in the wrong mode to form part of an address.
8586 And force_operand won't know whether to sign-extend or
8587 zero-extend. */
8588 if (modifier != EXPAND_INITIALIZER
8589 && (modifier != EXPAND_SUM || mode != ptr_mode))
8591 expand_operands (treeop0, treeop1,
8592 subtarget, &op0, &op1, modifier);
8593 if (op0 == const0_rtx)
8594 return op1;
8595 if (op1 == const0_rtx)
8596 return op0;
8597 goto binop2;
8600 expand_operands (treeop0, treeop1,
8601 subtarget, &op0, &op1, modifier);
8602 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8604 case MINUS_EXPR:
8605 case POINTER_DIFF_EXPR:
8606 do_minus:
8607 /* For initializers, we are allowed to return a MINUS of two
8608 symbolic constants. Here we handle all cases when both operands
8609 are constant. */
8610 /* Handle difference of two symbolic constants,
8611 for the sake of an initializer. */
8612 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8613 && really_constant_p (treeop0)
8614 && really_constant_p (treeop1))
8616 expand_operands (treeop0, treeop1,
8617 NULL_RTX, &op0, &op1, modifier);
8618 return simplify_gen_binary (MINUS, mode, op0, op1);
8621 /* No sense saving up arithmetic to be done
8622 if it's all in the wrong mode to form part of an address.
8623 And force_operand won't know whether to sign-extend or
8624 zero-extend. */
8625 if (modifier != EXPAND_INITIALIZER
8626 && (modifier != EXPAND_SUM || mode != ptr_mode))
8627 goto binop;
8629 expand_operands (treeop0, treeop1,
8630 subtarget, &op0, &op1, modifier);
8632 /* Convert A - const to A + (-const). */
8633 if (CONST_INT_P (op1))
8635 op1 = negate_rtx (mode, op1);
8636 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8639 goto binop2;
8641 case WIDEN_MULT_PLUS_EXPR:
8642 case WIDEN_MULT_MINUS_EXPR:
8643 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8644 op2 = expand_normal (treeop2);
8645 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8646 target, unsignedp);
8647 return target;
8649 case WIDEN_MULT_EXPR:
8650 /* If first operand is constant, swap them.
8651 Thus the following special case checks need only
8652 check the second operand. */
8653 if (TREE_CODE (treeop0) == INTEGER_CST)
8654 std::swap (treeop0, treeop1);
8656 /* First, check if we have a multiplication of one signed and one
8657 unsigned operand. */
8658 if (TREE_CODE (treeop1) != INTEGER_CST
8659 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8660 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8662 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8663 this_optab = usmul_widen_optab;
8664 if (find_widening_optab_handler (this_optab, mode, innermode)
8665 != CODE_FOR_nothing)
8667 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8668 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8669 EXPAND_NORMAL);
8670 else
8671 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8672 EXPAND_NORMAL);
8673 /* op0 and op1 might still be constant, despite the above
8674 != INTEGER_CST check. Handle it. */
8675 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8677 op0 = convert_modes (innermode, mode, op0, true);
8678 op1 = convert_modes (innermode, mode, op1, false);
8679 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8680 target, unsignedp));
8682 goto binop3;
8685 /* Check for a multiplication with matching signedness. */
8686 else if ((TREE_CODE (treeop1) == INTEGER_CST
8687 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8688 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8689 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8691 tree op0type = TREE_TYPE (treeop0);
8692 machine_mode innermode = TYPE_MODE (op0type);
8693 bool zextend_p = TYPE_UNSIGNED (op0type);
8694 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8695 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8697 if (TREE_CODE (treeop0) != INTEGER_CST)
8699 if (find_widening_optab_handler (this_optab, mode, innermode)
8700 != CODE_FOR_nothing)
8702 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8703 EXPAND_NORMAL);
8704 /* op0 and op1 might still be constant, despite the above
8705 != INTEGER_CST check. Handle it. */
8706 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8708 widen_mult_const:
8709 op0 = convert_modes (innermode, mode, op0, zextend_p);
8711 = convert_modes (innermode, mode, op1,
8712 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8713 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8714 target,
8715 unsignedp));
8717 temp = expand_widening_mult (mode, op0, op1, target,
8718 unsignedp, this_optab);
8719 return REDUCE_BIT_FIELD (temp);
8721 if (find_widening_optab_handler (other_optab, mode, innermode)
8722 != CODE_FOR_nothing
8723 && innermode == word_mode)
8725 rtx htem, hipart;
8726 op0 = expand_normal (treeop0);
8727 if (TREE_CODE (treeop1) == INTEGER_CST)
8728 op1 = convert_modes (word_mode, mode,
8729 expand_normal (treeop1),
8730 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8731 else
8732 op1 = expand_normal (treeop1);
8733 /* op0 and op1 might still be constant, despite the above
8734 != INTEGER_CST check. Handle it. */
8735 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8736 goto widen_mult_const;
8737 temp = expand_binop (mode, other_optab, op0, op1, target,
8738 unsignedp, OPTAB_LIB_WIDEN);
8739 hipart = gen_highpart (word_mode, temp);
8740 htem = expand_mult_highpart_adjust (word_mode, hipart,
8741 op0, op1, hipart,
8742 zextend_p);
8743 if (htem != hipart)
8744 emit_move_insn (hipart, htem);
8745 return REDUCE_BIT_FIELD (temp);
8749 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8750 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8751 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8752 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8754 case FMA_EXPR:
8756 optab opt = fma_optab;
8757 gimple *def0, *def2;
8759 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8760 call. */
8761 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8763 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8764 tree call_expr;
8766 gcc_assert (fn != NULL_TREE);
8767 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8768 return expand_builtin (call_expr, target, subtarget, mode, false);
8771 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8772 /* The multiplication is commutative - look at its 2nd operand
8773 if the first isn't fed by a negate. */
8774 if (!def0)
8776 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8777 /* Swap operands if the 2nd operand is fed by a negate. */
8778 if (def0)
8779 std::swap (treeop0, treeop1);
8781 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8783 op0 = op2 = NULL;
8785 if (def0 && def2
8786 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8788 opt = fnms_optab;
8789 op0 = expand_normal (gimple_assign_rhs1 (def0));
8790 op2 = expand_normal (gimple_assign_rhs1 (def2));
8792 else if (def0
8793 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8795 opt = fnma_optab;
8796 op0 = expand_normal (gimple_assign_rhs1 (def0));
8798 else if (def2
8799 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8801 opt = fms_optab;
8802 op2 = expand_normal (gimple_assign_rhs1 (def2));
8805 if (op0 == NULL)
8806 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8807 if (op2 == NULL)
8808 op2 = expand_normal (treeop2);
8809 op1 = expand_normal (treeop1);
8811 return expand_ternary_op (TYPE_MODE (type), opt,
8812 op0, op1, op2, target, 0);
8815 case MULT_EXPR:
8816 /* If this is a fixed-point operation, then we cannot use the code
8817 below because "expand_mult" doesn't support sat/no-sat fixed-point
8818 multiplications. */
8819 if (ALL_FIXED_POINT_MODE_P (mode))
8820 goto binop;
8822 /* If first operand is constant, swap them.
8823 Thus the following special case checks need only
8824 check the second operand. */
8825 if (TREE_CODE (treeop0) == INTEGER_CST)
8826 std::swap (treeop0, treeop1);
8828 /* Attempt to return something suitable for generating an
8829 indexed address, for machines that support that. */
8831 if (modifier == EXPAND_SUM && mode == ptr_mode
8832 && tree_fits_shwi_p (treeop1))
8834 tree exp1 = treeop1;
8836 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8837 EXPAND_SUM);
8839 if (!REG_P (op0))
8840 op0 = force_operand (op0, NULL_RTX);
8841 if (!REG_P (op0))
8842 op0 = copy_to_mode_reg (mode, op0);
8844 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8845 gen_int_mode (tree_to_shwi (exp1),
8846 TYPE_MODE (TREE_TYPE (exp1)))));
8849 if (modifier == EXPAND_STACK_PARM)
8850 target = 0;
8852 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8853 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8855 case TRUNC_MOD_EXPR:
8856 case FLOOR_MOD_EXPR:
8857 case CEIL_MOD_EXPR:
8858 case ROUND_MOD_EXPR:
8860 case TRUNC_DIV_EXPR:
8861 case FLOOR_DIV_EXPR:
8862 case CEIL_DIV_EXPR:
8863 case ROUND_DIV_EXPR:
8864 case EXACT_DIV_EXPR:
8866 /* If this is a fixed-point operation, then we cannot use the code
8867 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8868 divisions. */
8869 if (ALL_FIXED_POINT_MODE_P (mode))
8870 goto binop;
8872 if (modifier == EXPAND_STACK_PARM)
8873 target = 0;
8874 /* Possible optimization: compute the dividend with EXPAND_SUM
8875 then if the divisor is constant can optimize the case
8876 where some terms of the dividend have coeffs divisible by it. */
8877 expand_operands (treeop0, treeop1,
8878 subtarget, &op0, &op1, EXPAND_NORMAL);
8879 bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
8880 || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
8881 if (SCALAR_INT_MODE_P (mode)
8882 && optimize >= 2
8883 && get_range_pos_neg (treeop0) == 1
8884 && get_range_pos_neg (treeop1) == 1)
8886 /* If both arguments are known to be positive when interpreted
8887 as signed, we can expand it as both signed and unsigned
8888 division or modulo. Choose the cheaper sequence in that case. */
8889 bool speed_p = optimize_insn_for_speed_p ();
8890 do_pending_stack_adjust ();
8891 start_sequence ();
8892 rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
8893 rtx_insn *uns_insns = get_insns ();
8894 end_sequence ();
8895 start_sequence ();
8896 rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
8897 rtx_insn *sgn_insns = get_insns ();
8898 end_sequence ();
8899 unsigned uns_cost = seq_cost (uns_insns, speed_p);
8900 unsigned sgn_cost = seq_cost (sgn_insns, speed_p);
8902 /* If costs are the same then use as tie breaker the other
8903 other factor. */
8904 if (uns_cost == sgn_cost)
8906 uns_cost = seq_cost (uns_insns, !speed_p);
8907 sgn_cost = seq_cost (sgn_insns, !speed_p);
8910 if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
8912 emit_insn (uns_insns);
8913 return uns_ret;
8915 emit_insn (sgn_insns);
8916 return sgn_ret;
8918 return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
8920 case RDIV_EXPR:
8921 goto binop;
8923 case MULT_HIGHPART_EXPR:
8924 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8925 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8926 gcc_assert (temp);
8927 return temp;
8929 case FIXED_CONVERT_EXPR:
8930 op0 = expand_normal (treeop0);
8931 if (target == 0 || modifier == EXPAND_STACK_PARM)
8932 target = gen_reg_rtx (mode);
8934 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8935 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8936 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8937 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8938 else
8939 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8940 return target;
8942 case FIX_TRUNC_EXPR:
8943 op0 = expand_normal (treeop0);
8944 if (target == 0 || modifier == EXPAND_STACK_PARM)
8945 target = gen_reg_rtx (mode);
8946 expand_fix (target, op0, unsignedp);
8947 return target;
8949 case FLOAT_EXPR:
8950 op0 = expand_normal (treeop0);
8951 if (target == 0 || modifier == EXPAND_STACK_PARM)
8952 target = gen_reg_rtx (mode);
8953 /* expand_float can't figure out what to do if FROM has VOIDmode.
8954 So give it the correct mode. With -O, cse will optimize this. */
8955 if (GET_MODE (op0) == VOIDmode)
8956 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8957 op0);
8958 expand_float (target, op0,
8959 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8960 return target;
8962 case NEGATE_EXPR:
8963 op0 = expand_expr (treeop0, subtarget,
8964 VOIDmode, EXPAND_NORMAL);
8965 if (modifier == EXPAND_STACK_PARM)
8966 target = 0;
8967 temp = expand_unop (mode,
8968 optab_for_tree_code (NEGATE_EXPR, type,
8969 optab_default),
8970 op0, target, 0);
8971 gcc_assert (temp);
8972 return REDUCE_BIT_FIELD (temp);
8974 case ABS_EXPR:
8975 op0 = expand_expr (treeop0, subtarget,
8976 VOIDmode, EXPAND_NORMAL);
8977 if (modifier == EXPAND_STACK_PARM)
8978 target = 0;
8980 /* ABS_EXPR is not valid for complex arguments. */
8981 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8982 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8984 /* Unsigned abs is simply the operand. Testing here means we don't
8985 risk generating incorrect code below. */
8986 if (TYPE_UNSIGNED (type))
8987 return op0;
8989 return expand_abs (mode, op0, target, unsignedp,
8990 safe_from_p (target, treeop0, 1));
8992 case MAX_EXPR:
8993 case MIN_EXPR:
8994 target = original_target;
8995 if (target == 0
8996 || modifier == EXPAND_STACK_PARM
8997 || (MEM_P (target) && MEM_VOLATILE_P (target))
8998 || GET_MODE (target) != mode
8999 || (REG_P (target)
9000 && REGNO (target) < FIRST_PSEUDO_REGISTER))
9001 target = gen_reg_rtx (mode);
9002 expand_operands (treeop0, treeop1,
9003 target, &op0, &op1, EXPAND_NORMAL);
9005 /* First try to do it with a special MIN or MAX instruction.
9006 If that does not win, use a conditional jump to select the proper
9007 value. */
9008 this_optab = optab_for_tree_code (code, type, optab_default);
9009 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9010 OPTAB_WIDEN);
9011 if (temp != 0)
9012 return temp;
9014 /* For vector MIN <x, y>, expand it a VEC_COND_EXPR <x <= y, x, y>
9015 and similarly for MAX <x, y>. */
9016 if (VECTOR_TYPE_P (type))
9018 tree t0 = make_tree (type, op0);
9019 tree t1 = make_tree (type, op1);
9020 tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
9021 type, t0, t1);
9022 return expand_vec_cond_expr (type, comparison, t0, t1,
9023 original_target);
9026 /* At this point, a MEM target is no longer useful; we will get better
9027 code without it. */
9029 if (! REG_P (target))
9030 target = gen_reg_rtx (mode);
9032 /* If op1 was placed in target, swap op0 and op1. */
9033 if (target != op0 && target == op1)
9034 std::swap (op0, op1);
9036 /* We generate better code and avoid problems with op1 mentioning
9037 target by forcing op1 into a pseudo if it isn't a constant. */
9038 if (! CONSTANT_P (op1))
9039 op1 = force_reg (mode, op1);
9042 enum rtx_code comparison_code;
9043 rtx cmpop1 = op1;
9045 if (code == MAX_EXPR)
9046 comparison_code = unsignedp ? GEU : GE;
9047 else
9048 comparison_code = unsignedp ? LEU : LE;
9050 /* Canonicalize to comparisons against 0. */
9051 if (op1 == const1_rtx)
9053 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
9054 or (a != 0 ? a : 1) for unsigned.
9055 For MIN we are safe converting (a <= 1 ? a : 1)
9056 into (a <= 0 ? a : 1) */
9057 cmpop1 = const0_rtx;
9058 if (code == MAX_EXPR)
9059 comparison_code = unsignedp ? NE : GT;
9061 if (op1 == constm1_rtx && !unsignedp)
9063 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
9064 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
9065 cmpop1 = const0_rtx;
9066 if (code == MIN_EXPR)
9067 comparison_code = LT;
9070 /* Use a conditional move if possible. */
9071 if (can_conditionally_move_p (mode))
9073 rtx insn;
9075 start_sequence ();
9077 /* Try to emit the conditional move. */
9078 insn = emit_conditional_move (target, comparison_code,
9079 op0, cmpop1, mode,
9080 op0, op1, mode,
9081 unsignedp);
9083 /* If we could do the conditional move, emit the sequence,
9084 and return. */
9085 if (insn)
9087 rtx_insn *seq = get_insns ();
9088 end_sequence ();
9089 emit_insn (seq);
9090 return target;
9093 /* Otherwise discard the sequence and fall back to code with
9094 branches. */
9095 end_sequence ();
9098 if (target != op0)
9099 emit_move_insn (target, op0);
9101 lab = gen_label_rtx ();
9102 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
9103 unsignedp, mode, NULL_RTX, NULL, lab,
9104 profile_probability::uninitialized ());
9106 emit_move_insn (target, op1);
9107 emit_label (lab);
9108 return target;
9110 case BIT_NOT_EXPR:
9111 op0 = expand_expr (treeop0, subtarget,
9112 VOIDmode, EXPAND_NORMAL);
9113 if (modifier == EXPAND_STACK_PARM)
9114 target = 0;
9115 /* In case we have to reduce the result to bitfield precision
9116 for unsigned bitfield expand this as XOR with a proper constant
9117 instead. */
9118 if (reduce_bit_field && TYPE_UNSIGNED (type))
9120 int_mode = SCALAR_INT_TYPE_MODE (type);
9121 wide_int mask = wi::mask (TYPE_PRECISION (type),
9122 false, GET_MODE_PRECISION (int_mode));
9124 temp = expand_binop (int_mode, xor_optab, op0,
9125 immed_wide_int_const (mask, int_mode),
9126 target, 1, OPTAB_LIB_WIDEN);
9128 else
9129 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
9130 gcc_assert (temp);
9131 return temp;
9133 /* ??? Can optimize bitwise operations with one arg constant.
9134 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
9135 and (a bitwise1 b) bitwise2 b (etc)
9136 but that is probably not worth while. */
9138 case BIT_AND_EXPR:
9139 case BIT_IOR_EXPR:
9140 case BIT_XOR_EXPR:
9141 goto binop;
9143 case LROTATE_EXPR:
9144 case RROTATE_EXPR:
9145 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
9146 || type_has_mode_precision_p (type));
9147 /* fall through */
9149 case LSHIFT_EXPR:
9150 case RSHIFT_EXPR:
9152 /* If this is a fixed-point operation, then we cannot use the code
9153 below because "expand_shift" doesn't support sat/no-sat fixed-point
9154 shifts. */
9155 if (ALL_FIXED_POINT_MODE_P (mode))
9156 goto binop;
9158 if (! safe_from_p (subtarget, treeop1, 1))
9159 subtarget = 0;
9160 if (modifier == EXPAND_STACK_PARM)
9161 target = 0;
9162 op0 = expand_expr (treeop0, subtarget,
9163 VOIDmode, EXPAND_NORMAL);
9165 /* Left shift optimization when shifting across word_size boundary.
9167 If mode == GET_MODE_WIDER_MODE (word_mode), then normally
9168 there isn't native instruction to support this wide mode
9169 left shift. Given below scenario:
9171 Type A = (Type) B << C
9173 |< T >|
9174 | dest_high | dest_low |
9176 | word_size |
9178 If the shift amount C caused we shift B to across the word
9179 size boundary, i.e part of B shifted into high half of
9180 destination register, and part of B remains in the low
9181 half, then GCC will use the following left shift expand
9182 logic:
9184 1. Initialize dest_low to B.
9185 2. Initialize every bit of dest_high to the sign bit of B.
9186 3. Logic left shift dest_low by C bit to finalize dest_low.
9187 The value of dest_low before this shift is kept in a temp D.
9188 4. Logic left shift dest_high by C.
9189 5. Logic right shift D by (word_size - C).
9190 6. Or the result of 4 and 5 to finalize dest_high.
9192 While, by checking gimple statements, if operand B is
9193 coming from signed extension, then we can simplify above
9194 expand logic into:
9196 1. dest_high = src_low >> (word_size - C).
9197 2. dest_low = src_low << C.
9199 We can use one arithmetic right shift to finish all the
9200 purpose of steps 2, 4, 5, 6, thus we reduce the steps
9201 needed from 6 into 2.
9203 The case is similar for zero extension, except that we
9204 initialize dest_high to zero rather than copies of the sign
9205 bit from B. Furthermore, we need to use a logical right shift
9206 in this case.
9208 The choice of sign-extension versus zero-extension is
9209 determined entirely by whether or not B is signed and is
9210 independent of the current setting of unsignedp. */
9212 temp = NULL_RTX;
9213 if (code == LSHIFT_EXPR
9214 && target
9215 && REG_P (target)
9216 && GET_MODE_2XWIDER_MODE (word_mode).exists (&int_mode)
9217 && mode == int_mode
9218 && TREE_CONSTANT (treeop1)
9219 && TREE_CODE (treeop0) == SSA_NAME)
9221 gimple *def = SSA_NAME_DEF_STMT (treeop0);
9222 if (is_gimple_assign (def)
9223 && gimple_assign_rhs_code (def) == NOP_EXPR)
9225 scalar_int_mode rmode = SCALAR_INT_TYPE_MODE
9226 (TREE_TYPE (gimple_assign_rhs1 (def)));
9228 if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (int_mode)
9229 && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
9230 && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
9231 >= GET_MODE_BITSIZE (word_mode)))
9233 rtx_insn *seq, *seq_old;
9234 unsigned int high_off = subreg_highpart_offset (word_mode,
9235 int_mode);
9236 bool extend_unsigned
9237 = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
9238 rtx low = lowpart_subreg (word_mode, op0, int_mode);
9239 rtx dest_low = lowpart_subreg (word_mode, target, int_mode);
9240 rtx dest_high = simplify_gen_subreg (word_mode, target,
9241 int_mode, high_off);
9242 HOST_WIDE_INT ramount = (BITS_PER_WORD
9243 - TREE_INT_CST_LOW (treeop1));
9244 tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
9246 start_sequence ();
9247 /* dest_high = src_low >> (word_size - C). */
9248 temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
9249 rshift, dest_high,
9250 extend_unsigned);
9251 if (temp != dest_high)
9252 emit_move_insn (dest_high, temp);
9254 /* dest_low = src_low << C. */
9255 temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
9256 treeop1, dest_low, unsignedp);
9257 if (temp != dest_low)
9258 emit_move_insn (dest_low, temp);
9260 seq = get_insns ();
9261 end_sequence ();
9262 temp = target ;
9264 if (have_insn_for (ASHIFT, int_mode))
9266 bool speed_p = optimize_insn_for_speed_p ();
9267 start_sequence ();
9268 rtx ret_old = expand_variable_shift (code, int_mode,
9269 op0, treeop1,
9270 target,
9271 unsignedp);
9273 seq_old = get_insns ();
9274 end_sequence ();
9275 if (seq_cost (seq, speed_p)
9276 >= seq_cost (seq_old, speed_p))
9278 seq = seq_old;
9279 temp = ret_old;
9282 emit_insn (seq);
9287 if (temp == NULL_RTX)
9288 temp = expand_variable_shift (code, mode, op0, treeop1, target,
9289 unsignedp);
9290 if (code == LSHIFT_EXPR)
9291 temp = REDUCE_BIT_FIELD (temp);
9292 return temp;
9295 /* Could determine the answer when only additive constants differ. Also,
9296 the addition of one can be handled by changing the condition. */
9297 case LT_EXPR:
9298 case LE_EXPR:
9299 case GT_EXPR:
9300 case GE_EXPR:
9301 case EQ_EXPR:
9302 case NE_EXPR:
9303 case UNORDERED_EXPR:
9304 case ORDERED_EXPR:
9305 case UNLT_EXPR:
9306 case UNLE_EXPR:
9307 case UNGT_EXPR:
9308 case UNGE_EXPR:
9309 case UNEQ_EXPR:
9310 case LTGT_EXPR:
9312 temp = do_store_flag (ops,
9313 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9314 tmode != VOIDmode ? tmode : mode);
9315 if (temp)
9316 return temp;
9318 /* Use a compare and a jump for BLKmode comparisons, or for function
9319 type comparisons is have_canonicalize_funcptr_for_compare. */
9321 if ((target == 0
9322 || modifier == EXPAND_STACK_PARM
9323 || ! safe_from_p (target, treeop0, 1)
9324 || ! safe_from_p (target, treeop1, 1)
9325 /* Make sure we don't have a hard reg (such as function's return
9326 value) live across basic blocks, if not optimizing. */
9327 || (!optimize && REG_P (target)
9328 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9329 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9331 emit_move_insn (target, const0_rtx);
9333 rtx_code_label *lab1 = gen_label_rtx ();
9334 jumpifnot_1 (code, treeop0, treeop1, lab1,
9335 profile_probability::uninitialized ());
9337 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9338 emit_move_insn (target, constm1_rtx);
9339 else
9340 emit_move_insn (target, const1_rtx);
9342 emit_label (lab1);
9343 return target;
9345 case COMPLEX_EXPR:
9346 /* Get the rtx code of the operands. */
9347 op0 = expand_normal (treeop0);
9348 op1 = expand_normal (treeop1);
9350 if (!target)
9351 target = gen_reg_rtx (TYPE_MODE (type));
9352 else
9353 /* If target overlaps with op1, then either we need to force
9354 op1 into a pseudo (if target also overlaps with op0),
9355 or write the complex parts in reverse order. */
9356 switch (GET_CODE (target))
9358 case CONCAT:
9359 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9361 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9363 complex_expr_force_op1:
9364 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9365 emit_move_insn (temp, op1);
9366 op1 = temp;
9367 break;
9369 complex_expr_swap_order:
9370 /* Move the imaginary (op1) and real (op0) parts to their
9371 location. */
9372 write_complex_part (target, op1, true);
9373 write_complex_part (target, op0, false);
9375 return target;
9377 break;
9378 case MEM:
9379 temp = adjust_address_nv (target,
9380 GET_MODE_INNER (GET_MODE (target)), 0);
9381 if (reg_overlap_mentioned_p (temp, op1))
9383 scalar_mode imode = GET_MODE_INNER (GET_MODE (target));
9384 temp = adjust_address_nv (target, imode,
9385 GET_MODE_SIZE (imode));
9386 if (reg_overlap_mentioned_p (temp, op0))
9387 goto complex_expr_force_op1;
9388 goto complex_expr_swap_order;
9390 break;
9391 default:
9392 if (reg_overlap_mentioned_p (target, op1))
9394 if (reg_overlap_mentioned_p (target, op0))
9395 goto complex_expr_force_op1;
9396 goto complex_expr_swap_order;
9398 break;
9401 /* Move the real (op0) and imaginary (op1) parts to their location. */
9402 write_complex_part (target, op0, false);
9403 write_complex_part (target, op1, true);
9405 return target;
9407 case WIDEN_SUM_EXPR:
9409 tree oprnd0 = treeop0;
9410 tree oprnd1 = treeop1;
9412 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9413 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9414 target, unsignedp);
9415 return target;
9418 case VEC_UNPACK_HI_EXPR:
9419 case VEC_UNPACK_LO_EXPR:
9421 op0 = expand_normal (treeop0);
9422 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9423 target, unsignedp);
9424 gcc_assert (temp);
9425 return temp;
9428 case VEC_UNPACK_FLOAT_HI_EXPR:
9429 case VEC_UNPACK_FLOAT_LO_EXPR:
9431 op0 = expand_normal (treeop0);
9432 /* The signedness is determined from input operand. */
9433 temp = expand_widen_pattern_expr
9434 (ops, op0, NULL_RTX, NULL_RTX,
9435 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9437 gcc_assert (temp);
9438 return temp;
9441 case VEC_WIDEN_MULT_HI_EXPR:
9442 case VEC_WIDEN_MULT_LO_EXPR:
9443 case VEC_WIDEN_MULT_EVEN_EXPR:
9444 case VEC_WIDEN_MULT_ODD_EXPR:
9445 case VEC_WIDEN_LSHIFT_HI_EXPR:
9446 case VEC_WIDEN_LSHIFT_LO_EXPR:
9447 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9448 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9449 target, unsignedp);
9450 gcc_assert (target);
9451 return target;
9453 case VEC_PACK_TRUNC_EXPR:
9454 case VEC_PACK_SAT_EXPR:
9455 case VEC_PACK_FIX_TRUNC_EXPR:
9456 mode = TYPE_MODE (TREE_TYPE (treeop0));
9457 goto binop;
9459 case VEC_PERM_EXPR:
9460 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9461 op2 = expand_normal (treeop2);
9463 /* Careful here: if the target doesn't support integral vector modes,
9464 a constant selection vector could wind up smooshed into a normal
9465 integral constant. */
9466 if (CONSTANT_P (op2) && !VECTOR_MODE_P (GET_MODE (op2)))
9468 tree sel_type = TREE_TYPE (treeop2);
9469 machine_mode vmode
9470 = mode_for_vector (SCALAR_TYPE_MODE (TREE_TYPE (sel_type)),
9471 TYPE_VECTOR_SUBPARTS (sel_type)).require ();
9472 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9473 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9474 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9476 else
9477 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9479 temp = expand_vec_perm (mode, op0, op1, op2, target);
9480 gcc_assert (temp);
9481 return temp;
9483 case DOT_PROD_EXPR:
9485 tree oprnd0 = treeop0;
9486 tree oprnd1 = treeop1;
9487 tree oprnd2 = treeop2;
9488 rtx op2;
9490 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9491 op2 = expand_normal (oprnd2);
9492 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9493 target, unsignedp);
9494 return target;
9497 case SAD_EXPR:
9499 tree oprnd0 = treeop0;
9500 tree oprnd1 = treeop1;
9501 tree oprnd2 = treeop2;
9502 rtx op2;
9504 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9505 op2 = expand_normal (oprnd2);
9506 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9507 target, unsignedp);
9508 return target;
9511 case REALIGN_LOAD_EXPR:
9513 tree oprnd0 = treeop0;
9514 tree oprnd1 = treeop1;
9515 tree oprnd2 = treeop2;
9516 rtx op2;
9518 this_optab = optab_for_tree_code (code, type, optab_default);
9519 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9520 op2 = expand_normal (oprnd2);
9521 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9522 target, unsignedp);
9523 gcc_assert (temp);
9524 return temp;
9527 case COND_EXPR:
9529 /* A COND_EXPR with its type being VOID_TYPE represents a
9530 conditional jump and is handled in
9531 expand_gimple_cond_expr. */
9532 gcc_assert (!VOID_TYPE_P (type));
9534 /* Note that COND_EXPRs whose type is a structure or union
9535 are required to be constructed to contain assignments of
9536 a temporary variable, so that we can evaluate them here
9537 for side effect only. If type is void, we must do likewise. */
9539 gcc_assert (!TREE_ADDRESSABLE (type)
9540 && !ignore
9541 && TREE_TYPE (treeop1) != void_type_node
9542 && TREE_TYPE (treeop2) != void_type_node);
9544 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9545 if (temp)
9546 return temp;
9548 /* If we are not to produce a result, we have no target. Otherwise,
9549 if a target was specified use it; it will not be used as an
9550 intermediate target unless it is safe. If no target, use a
9551 temporary. */
9553 if (modifier != EXPAND_STACK_PARM
9554 && original_target
9555 && safe_from_p (original_target, treeop0, 1)
9556 && GET_MODE (original_target) == mode
9557 && !MEM_P (original_target))
9558 temp = original_target;
9559 else
9560 temp = assign_temp (type, 0, 1);
9562 do_pending_stack_adjust ();
9563 NO_DEFER_POP;
9564 rtx_code_label *lab0 = gen_label_rtx ();
9565 rtx_code_label *lab1 = gen_label_rtx ();
9566 jumpifnot (treeop0, lab0,
9567 profile_probability::uninitialized ());
9568 store_expr (treeop1, temp,
9569 modifier == EXPAND_STACK_PARM,
9570 false, false);
9572 emit_jump_insn (targetm.gen_jump (lab1));
9573 emit_barrier ();
9574 emit_label (lab0);
9575 store_expr (treeop2, temp,
9576 modifier == EXPAND_STACK_PARM,
9577 false, false);
9579 emit_label (lab1);
9580 OK_DEFER_POP;
9581 return temp;
9584 case VEC_COND_EXPR:
9585 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9586 return target;
9588 case VEC_DUPLICATE_EXPR:
9589 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
9590 target = expand_vector_broadcast (mode, op0);
9591 gcc_assert (target);
9592 return target;
9594 case VEC_SERIES_EXPR:
9595 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, modifier);
9596 return expand_vec_series_expr (mode, op0, op1, target);
9598 case BIT_INSERT_EXPR:
9600 unsigned bitpos = tree_to_uhwi (treeop2);
9601 unsigned bitsize;
9602 if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
9603 bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
9604 else
9605 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
9606 rtx op0 = expand_normal (treeop0);
9607 rtx op1 = expand_normal (treeop1);
9608 rtx dst = gen_reg_rtx (mode);
9609 emit_move_insn (dst, op0);
9610 store_bit_field (dst, bitsize, bitpos, 0, 0,
9611 TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
9612 return dst;
9615 default:
9616 gcc_unreachable ();
9619 /* Here to do an ordinary binary operator. */
9620 binop:
9621 expand_operands (treeop0, treeop1,
9622 subtarget, &op0, &op1, EXPAND_NORMAL);
9623 binop2:
9624 this_optab = optab_for_tree_code (code, type, optab_default);
9625 binop3:
9626 if (modifier == EXPAND_STACK_PARM)
9627 target = 0;
9628 temp = expand_binop (mode, this_optab, op0, op1, target,
9629 unsignedp, OPTAB_LIB_WIDEN);
9630 gcc_assert (temp);
9631 /* Bitwise operations do not need bitfield reduction as we expect their
9632 operands being properly truncated. */
9633 if (code == BIT_XOR_EXPR
9634 || code == BIT_AND_EXPR
9635 || code == BIT_IOR_EXPR)
9636 return temp;
9637 return REDUCE_BIT_FIELD (temp);
9639 #undef REDUCE_BIT_FIELD
9642 /* Return TRUE if expression STMT is suitable for replacement.
9643 Never consider memory loads as replaceable, because those don't ever lead
9644 into constant expressions. */
9646 static bool
9647 stmt_is_replaceable_p (gimple *stmt)
9649 if (ssa_is_replaceable_p (stmt))
9651 /* Don't move around loads. */
9652 if (!gimple_assign_single_p (stmt)
9653 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9654 return true;
9656 return false;
9660 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9661 enum expand_modifier modifier, rtx *alt_rtl,
9662 bool inner_reference_p)
9664 rtx op0, op1, temp, decl_rtl;
9665 tree type;
9666 int unsignedp;
9667 machine_mode mode, dmode;
9668 enum tree_code code = TREE_CODE (exp);
9669 rtx subtarget, original_target;
9670 int ignore;
9671 tree context;
9672 bool reduce_bit_field;
9673 location_t loc = EXPR_LOCATION (exp);
9674 struct separate_ops ops;
9675 tree treeop0, treeop1, treeop2;
9676 tree ssa_name = NULL_TREE;
9677 gimple *g;
9679 type = TREE_TYPE (exp);
9680 mode = TYPE_MODE (type);
9681 unsignedp = TYPE_UNSIGNED (type);
9683 treeop0 = treeop1 = treeop2 = NULL_TREE;
9684 if (!VL_EXP_CLASS_P (exp))
9685 switch (TREE_CODE_LENGTH (code))
9687 default:
9688 case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
9689 case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
9690 case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
9691 case 0: break;
9693 ops.code = code;
9694 ops.type = type;
9695 ops.op0 = treeop0;
9696 ops.op1 = treeop1;
9697 ops.op2 = treeop2;
9698 ops.location = loc;
9700 ignore = (target == const0_rtx
9701 || ((CONVERT_EXPR_CODE_P (code)
9702 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9703 && TREE_CODE (type) == VOID_TYPE));
9705 /* An operation in what may be a bit-field type needs the
9706 result to be reduced to the precision of the bit-field type,
9707 which is narrower than that of the type's mode. */
9708 reduce_bit_field = (!ignore
9709 && INTEGRAL_TYPE_P (type)
9710 && !type_has_mode_precision_p (type));
9712 /* If we are going to ignore this result, we need only do something
9713 if there is a side-effect somewhere in the expression. If there
9714 is, short-circuit the most common cases here. Note that we must
9715 not call expand_expr with anything but const0_rtx in case this
9716 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9718 if (ignore)
9720 if (! TREE_SIDE_EFFECTS (exp))
9721 return const0_rtx;
9723 /* Ensure we reference a volatile object even if value is ignored, but
9724 don't do this if all we are doing is taking its address. */
9725 if (TREE_THIS_VOLATILE (exp)
9726 && TREE_CODE (exp) != FUNCTION_DECL
9727 && mode != VOIDmode && mode != BLKmode
9728 && modifier != EXPAND_CONST_ADDRESS)
9730 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9731 if (MEM_P (temp))
9732 copy_to_reg (temp);
9733 return const0_rtx;
9736 if (TREE_CODE_CLASS (code) == tcc_unary
9737 || code == BIT_FIELD_REF
9738 || code == COMPONENT_REF
9739 || code == INDIRECT_REF)
9740 return expand_expr (treeop0, const0_rtx, VOIDmode,
9741 modifier);
9743 else if (TREE_CODE_CLASS (code) == tcc_binary
9744 || TREE_CODE_CLASS (code) == tcc_comparison
9745 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9747 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9748 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9749 return const0_rtx;
9752 target = 0;
9755 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9756 target = 0;
9758 /* Use subtarget as the target for operand 0 of a binary operation. */
9759 subtarget = get_subtarget (target);
9760 original_target = target;
9762 switch (code)
9764 case LABEL_DECL:
9766 tree function = decl_function_context (exp);
9768 temp = label_rtx (exp);
9769 temp = gen_rtx_LABEL_REF (Pmode, temp);
9771 if (function != current_function_decl
9772 && function != 0)
9773 LABEL_REF_NONLOCAL_P (temp) = 1;
9775 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9776 return temp;
9779 case SSA_NAME:
9780 /* ??? ivopts calls expander, without any preparation from
9781 out-of-ssa. So fake instructions as if this was an access to the
9782 base variable. This unnecessarily allocates a pseudo, see how we can
9783 reuse it, if partition base vars have it set already. */
9784 if (!currently_expanding_to_rtl)
9786 tree var = SSA_NAME_VAR (exp);
9787 if (var && DECL_RTL_SET_P (var))
9788 return DECL_RTL (var);
9789 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9790 LAST_VIRTUAL_REGISTER + 1);
9793 g = get_gimple_for_ssa_name (exp);
9794 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9795 if (g == NULL
9796 && modifier == EXPAND_INITIALIZER
9797 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9798 && (optimize || !SSA_NAME_VAR (exp)
9799 || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9800 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9801 g = SSA_NAME_DEF_STMT (exp);
9802 if (g)
9804 rtx r;
9805 location_t saved_loc = curr_insn_location ();
9806 location_t loc = gimple_location (g);
9807 if (loc != UNKNOWN_LOCATION)
9808 set_curr_insn_location (loc);
9809 ops.code = gimple_assign_rhs_code (g);
9810 switch (get_gimple_rhs_class (ops.code))
9812 case GIMPLE_TERNARY_RHS:
9813 ops.op2 = gimple_assign_rhs3 (g);
9814 /* Fallthru */
9815 case GIMPLE_BINARY_RHS:
9816 ops.op1 = gimple_assign_rhs2 (g);
9818 /* Try to expand conditonal compare. */
9819 if (targetm.gen_ccmp_first)
9821 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9822 r = expand_ccmp_expr (g, mode);
9823 if (r)
9824 break;
9826 /* Fallthru */
9827 case GIMPLE_UNARY_RHS:
9828 ops.op0 = gimple_assign_rhs1 (g);
9829 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9830 ops.location = loc;
9831 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9832 break;
9833 case GIMPLE_SINGLE_RHS:
9835 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9836 tmode, modifier, alt_rtl,
9837 inner_reference_p);
9838 break;
9840 default:
9841 gcc_unreachable ();
9843 set_curr_insn_location (saved_loc);
9844 if (REG_P (r) && !REG_EXPR (r))
9845 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9846 return r;
9849 ssa_name = exp;
9850 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9851 exp = SSA_NAME_VAR (ssa_name);
9852 goto expand_decl_rtl;
9854 case PARM_DECL:
9855 case VAR_DECL:
9856 /* If a static var's type was incomplete when the decl was written,
9857 but the type is complete now, lay out the decl now. */
9858 if (DECL_SIZE (exp) == 0
9859 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9860 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9861 layout_decl (exp, 0);
9863 /* fall through */
9865 case FUNCTION_DECL:
9866 case RESULT_DECL:
9867 decl_rtl = DECL_RTL (exp);
9868 expand_decl_rtl:
9869 gcc_assert (decl_rtl);
9871 /* DECL_MODE might change when TYPE_MODE depends on attribute target
9872 settings for VECTOR_TYPE_P that might switch for the function. */
9873 if (currently_expanding_to_rtl
9874 && code == VAR_DECL && MEM_P (decl_rtl)
9875 && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
9876 decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
9877 else
9878 decl_rtl = copy_rtx (decl_rtl);
9880 /* Record writes to register variables. */
9881 if (modifier == EXPAND_WRITE
9882 && REG_P (decl_rtl)
9883 && HARD_REGISTER_P (decl_rtl))
9884 add_to_hard_reg_set (&crtl->asm_clobbers,
9885 GET_MODE (decl_rtl), REGNO (decl_rtl));
9887 /* Ensure variable marked as used even if it doesn't go through
9888 a parser. If it hasn't be used yet, write out an external
9889 definition. */
9890 if (exp)
9891 TREE_USED (exp) = 1;
9893 /* Show we haven't gotten RTL for this yet. */
9894 temp = 0;
9896 /* Variables inherited from containing functions should have
9897 been lowered by this point. */
9898 if (exp)
9899 context = decl_function_context (exp);
9900 gcc_assert (!exp
9901 || SCOPE_FILE_SCOPE_P (context)
9902 || context == current_function_decl
9903 || TREE_STATIC (exp)
9904 || DECL_EXTERNAL (exp)
9905 /* ??? C++ creates functions that are not TREE_STATIC. */
9906 || TREE_CODE (exp) == FUNCTION_DECL);
9908 /* This is the case of an array whose size is to be determined
9909 from its initializer, while the initializer is still being parsed.
9910 ??? We aren't parsing while expanding anymore. */
9912 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9913 temp = validize_mem (decl_rtl);
9915 /* If DECL_RTL is memory, we are in the normal case and the
9916 address is not valid, get the address into a register. */
9918 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9920 if (alt_rtl)
9921 *alt_rtl = decl_rtl;
9922 decl_rtl = use_anchored_address (decl_rtl);
9923 if (modifier != EXPAND_CONST_ADDRESS
9924 && modifier != EXPAND_SUM
9925 && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
9926 : GET_MODE (decl_rtl),
9927 XEXP (decl_rtl, 0),
9928 MEM_ADDR_SPACE (decl_rtl)))
9929 temp = replace_equiv_address (decl_rtl,
9930 copy_rtx (XEXP (decl_rtl, 0)));
9933 /* If we got something, return it. But first, set the alignment
9934 if the address is a register. */
9935 if (temp != 0)
9937 if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
9938 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9940 return temp;
9943 if (exp)
9944 dmode = DECL_MODE (exp);
9945 else
9946 dmode = TYPE_MODE (TREE_TYPE (ssa_name));
9948 /* If the mode of DECL_RTL does not match that of the decl,
9949 there are two cases: we are dealing with a BLKmode value
9950 that is returned in a register, or we are dealing with
9951 a promoted value. In the latter case, return a SUBREG
9952 of the wanted mode, but mark it so that we know that it
9953 was already extended. */
9954 if (REG_P (decl_rtl)
9955 && dmode != BLKmode
9956 && GET_MODE (decl_rtl) != dmode)
9958 machine_mode pmode;
9960 /* Get the signedness to be used for this variable. Ensure we get
9961 the same mode we got when the variable was declared. */
9962 if (code != SSA_NAME)
9963 pmode = promote_decl_mode (exp, &unsignedp);
9964 else if ((g = SSA_NAME_DEF_STMT (ssa_name))
9965 && gimple_code (g) == GIMPLE_CALL
9966 && !gimple_call_internal_p (g))
9967 pmode = promote_function_mode (type, mode, &unsignedp,
9968 gimple_call_fntype (g),
9970 else
9971 pmode = promote_ssa_mode (ssa_name, &unsignedp);
9972 gcc_assert (GET_MODE (decl_rtl) == pmode);
9974 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9975 SUBREG_PROMOTED_VAR_P (temp) = 1;
9976 SUBREG_PROMOTED_SET (temp, unsignedp);
9977 return temp;
9980 return decl_rtl;
9982 case INTEGER_CST:
9984 /* Given that TYPE_PRECISION (type) is not always equal to
9985 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9986 the former to the latter according to the signedness of the
9987 type. */
9988 scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
9989 temp = immed_wide_int_const
9990 (wi::to_wide (exp, GET_MODE_PRECISION (mode)), mode);
9991 return temp;
9994 case VECTOR_CST:
9996 tree tmp = NULL_TREE;
9997 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9998 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9999 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
10000 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
10001 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
10002 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
10003 return const_vector_from_tree (exp);
10004 scalar_int_mode int_mode;
10005 if (is_int_mode (mode, &int_mode))
10007 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
10008 return const_scalar_mask_from_tree (int_mode, exp);
10009 else
10011 tree type_for_mode
10012 = lang_hooks.types.type_for_mode (int_mode, 1);
10013 if (type_for_mode)
10014 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
10015 type_for_mode, exp);
10018 if (!tmp)
10020 vec<constructor_elt, va_gc> *v;
10021 unsigned i;
10022 vec_alloc (v, VECTOR_CST_NELTS (exp));
10023 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
10024 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
10025 tmp = build_constructor (type, v);
10027 return expand_expr (tmp, ignore ? const0_rtx : target,
10028 tmode, modifier);
10031 case CONST_DECL:
10032 if (modifier == EXPAND_WRITE)
10034 /* Writing into CONST_DECL is always invalid, but handle it
10035 gracefully. */
10036 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
10037 scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
10038 op0 = expand_expr_addr_expr_1 (exp, NULL_RTX, address_mode,
10039 EXPAND_NORMAL, as);
10040 op0 = memory_address_addr_space (mode, op0, as);
10041 temp = gen_rtx_MEM (mode, op0);
10042 set_mem_addr_space (temp, as);
10043 return temp;
10045 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
10047 case REAL_CST:
10048 /* If optimized, generate immediate CONST_DOUBLE
10049 which will be turned into memory by reload if necessary.
10051 We used to force a register so that loop.c could see it. But
10052 this does not allow gen_* patterns to perform optimizations with
10053 the constants. It also produces two insns in cases like "x = 1.0;".
10054 On most machines, floating-point constants are not permitted in
10055 many insns, so we'd end up copying it to a register in any case.
10057 Now, we do the copying in expand_binop, if appropriate. */
10058 return const_double_from_real_value (TREE_REAL_CST (exp),
10059 TYPE_MODE (TREE_TYPE (exp)));
10061 case FIXED_CST:
10062 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
10063 TYPE_MODE (TREE_TYPE (exp)));
10065 case COMPLEX_CST:
10066 /* Handle evaluating a complex constant in a CONCAT target. */
10067 if (original_target && GET_CODE (original_target) == CONCAT)
10069 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
10070 rtx rtarg, itarg;
10072 rtarg = XEXP (original_target, 0);
10073 itarg = XEXP (original_target, 1);
10075 /* Move the real and imaginary parts separately. */
10076 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
10077 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
10079 if (op0 != rtarg)
10080 emit_move_insn (rtarg, op0);
10081 if (op1 != itarg)
10082 emit_move_insn (itarg, op1);
10084 return original_target;
10087 /* fall through */
10089 case STRING_CST:
10090 temp = expand_expr_constant (exp, 1, modifier);
10092 /* temp contains a constant address.
10093 On RISC machines where a constant address isn't valid,
10094 make some insns to get that address into a register. */
10095 if (modifier != EXPAND_CONST_ADDRESS
10096 && modifier != EXPAND_INITIALIZER
10097 && modifier != EXPAND_SUM
10098 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
10099 MEM_ADDR_SPACE (temp)))
10100 return replace_equiv_address (temp,
10101 copy_rtx (XEXP (temp, 0)));
10102 return temp;
10104 case POLY_INT_CST:
10105 return immed_wide_int_const (poly_int_cst_value (exp), mode);
10107 case SAVE_EXPR:
10109 tree val = treeop0;
10110 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
10111 inner_reference_p);
10113 if (!SAVE_EXPR_RESOLVED_P (exp))
10115 /* We can indeed still hit this case, typically via builtin
10116 expanders calling save_expr immediately before expanding
10117 something. Assume this means that we only have to deal
10118 with non-BLKmode values. */
10119 gcc_assert (GET_MODE (ret) != BLKmode);
10121 val = build_decl (curr_insn_location (),
10122 VAR_DECL, NULL, TREE_TYPE (exp));
10123 DECL_ARTIFICIAL (val) = 1;
10124 DECL_IGNORED_P (val) = 1;
10125 treeop0 = val;
10126 TREE_OPERAND (exp, 0) = treeop0;
10127 SAVE_EXPR_RESOLVED_P (exp) = 1;
10129 if (!CONSTANT_P (ret))
10130 ret = copy_to_reg (ret);
10131 SET_DECL_RTL (val, ret);
10134 return ret;
10138 case CONSTRUCTOR:
10139 /* If we don't need the result, just ensure we evaluate any
10140 subexpressions. */
10141 if (ignore)
10143 unsigned HOST_WIDE_INT idx;
10144 tree value;
10146 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
10147 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
10149 return const0_rtx;
10152 return expand_constructor (exp, target, modifier, false);
10154 case TARGET_MEM_REF:
10156 addr_space_t as
10157 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10158 enum insn_code icode;
10159 unsigned int align;
10161 op0 = addr_for_mem_ref (exp, as, true);
10162 op0 = memory_address_addr_space (mode, op0, as);
10163 temp = gen_rtx_MEM (mode, op0);
10164 set_mem_attributes (temp, exp, 0);
10165 set_mem_addr_space (temp, as);
10166 align = get_object_alignment (exp);
10167 if (modifier != EXPAND_WRITE
10168 && modifier != EXPAND_MEMORY
10169 && mode != BLKmode
10170 && align < GET_MODE_ALIGNMENT (mode)
10171 /* If the target does not have special handling for unaligned
10172 loads of mode then it can use regular moves for them. */
10173 && ((icode = optab_handler (movmisalign_optab, mode))
10174 != CODE_FOR_nothing))
10176 struct expand_operand ops[2];
10178 /* We've already validated the memory, and we're creating a
10179 new pseudo destination. The predicates really can't fail,
10180 nor can the generator. */
10181 create_output_operand (&ops[0], NULL_RTX, mode);
10182 create_fixed_operand (&ops[1], temp);
10183 expand_insn (icode, 2, ops);
10184 temp = ops[0].value;
10186 return temp;
10189 case MEM_REF:
10191 const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
10192 addr_space_t as
10193 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10194 machine_mode address_mode;
10195 tree base = TREE_OPERAND (exp, 0);
10196 gimple *def_stmt;
10197 enum insn_code icode;
10198 unsigned align;
10199 /* Handle expansion of non-aliased memory with non-BLKmode. That
10200 might end up in a register. */
10201 if (mem_ref_refers_to_non_mem_p (exp))
10203 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
10204 base = TREE_OPERAND (base, 0);
10205 if (offset == 0
10206 && !reverse
10207 && tree_fits_uhwi_p (TYPE_SIZE (type))
10208 && (GET_MODE_BITSIZE (DECL_MODE (base))
10209 == tree_to_uhwi (TYPE_SIZE (type))))
10210 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
10211 target, tmode, modifier);
10212 if (TYPE_MODE (type) == BLKmode)
10214 temp = assign_stack_temp (DECL_MODE (base),
10215 GET_MODE_SIZE (DECL_MODE (base)));
10216 store_expr (base, temp, 0, false, false);
10217 temp = adjust_address (temp, BLKmode, offset);
10218 set_mem_size (temp, int_size_in_bytes (type));
10219 return temp;
10221 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
10222 bitsize_int (offset * BITS_PER_UNIT));
10223 REF_REVERSE_STORAGE_ORDER (exp) = reverse;
10224 return expand_expr (exp, target, tmode, modifier);
10226 address_mode = targetm.addr_space.address_mode (as);
10227 base = TREE_OPERAND (exp, 0);
10228 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
10230 tree mask = gimple_assign_rhs2 (def_stmt);
10231 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
10232 gimple_assign_rhs1 (def_stmt), mask);
10233 TREE_OPERAND (exp, 0) = base;
10235 align = get_object_alignment (exp);
10236 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
10237 op0 = memory_address_addr_space (mode, op0, as);
10238 if (!integer_zerop (TREE_OPERAND (exp, 1)))
10240 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
10241 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
10242 op0 = memory_address_addr_space (mode, op0, as);
10244 temp = gen_rtx_MEM (mode, op0);
10245 set_mem_attributes (temp, exp, 0);
10246 set_mem_addr_space (temp, as);
10247 if (TREE_THIS_VOLATILE (exp))
10248 MEM_VOLATILE_P (temp) = 1;
10249 if (modifier != EXPAND_WRITE
10250 && modifier != EXPAND_MEMORY
10251 && !inner_reference_p
10252 && mode != BLKmode
10253 && align < GET_MODE_ALIGNMENT (mode))
10255 if ((icode = optab_handler (movmisalign_optab, mode))
10256 != CODE_FOR_nothing)
10258 struct expand_operand ops[2];
10260 /* We've already validated the memory, and we're creating a
10261 new pseudo destination. The predicates really can't fail,
10262 nor can the generator. */
10263 create_output_operand (&ops[0], NULL_RTX, mode);
10264 create_fixed_operand (&ops[1], temp);
10265 expand_insn (icode, 2, ops);
10266 temp = ops[0].value;
10268 else if (targetm.slow_unaligned_access (mode, align))
10269 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
10270 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
10271 (modifier == EXPAND_STACK_PARM
10272 ? NULL_RTX : target),
10273 mode, mode, false, alt_rtl);
10275 if (reverse
10276 && modifier != EXPAND_MEMORY
10277 && modifier != EXPAND_WRITE)
10278 temp = flip_storage_order (mode, temp);
10279 return temp;
10282 case ARRAY_REF:
10285 tree array = treeop0;
10286 tree index = treeop1;
10287 tree init;
10289 /* Fold an expression like: "foo"[2].
10290 This is not done in fold so it won't happen inside &.
10291 Don't fold if this is for wide characters since it's too
10292 difficult to do correctly and this is a very rare case. */
10294 if (modifier != EXPAND_CONST_ADDRESS
10295 && modifier != EXPAND_INITIALIZER
10296 && modifier != EXPAND_MEMORY)
10298 tree t = fold_read_from_constant_string (exp);
10300 if (t)
10301 return expand_expr (t, target, tmode, modifier);
10304 /* If this is a constant index into a constant array,
10305 just get the value from the array. Handle both the cases when
10306 we have an explicit constructor and when our operand is a variable
10307 that was declared const. */
10309 if (modifier != EXPAND_CONST_ADDRESS
10310 && modifier != EXPAND_INITIALIZER
10311 && modifier != EXPAND_MEMORY
10312 && TREE_CODE (array) == CONSTRUCTOR
10313 && ! TREE_SIDE_EFFECTS (array)
10314 && TREE_CODE (index) == INTEGER_CST)
10316 unsigned HOST_WIDE_INT ix;
10317 tree field, value;
10319 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10320 field, value)
10321 if (tree_int_cst_equal (field, index))
10323 if (!TREE_SIDE_EFFECTS (value))
10324 return expand_expr (fold (value), target, tmode, modifier);
10325 break;
10329 else if (optimize >= 1
10330 && modifier != EXPAND_CONST_ADDRESS
10331 && modifier != EXPAND_INITIALIZER
10332 && modifier != EXPAND_MEMORY
10333 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10334 && TREE_CODE (index) == INTEGER_CST
10335 && (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
10336 && (init = ctor_for_folding (array)) != error_mark_node)
10338 if (init == NULL_TREE)
10340 tree value = build_zero_cst (type);
10341 if (TREE_CODE (value) == CONSTRUCTOR)
10343 /* If VALUE is a CONSTRUCTOR, this optimization is only
10344 useful if this doesn't store the CONSTRUCTOR into
10345 memory. If it does, it is more efficient to just
10346 load the data from the array directly. */
10347 rtx ret = expand_constructor (value, target,
10348 modifier, true);
10349 if (ret == NULL_RTX)
10350 value = NULL_TREE;
10353 if (value)
10354 return expand_expr (value, target, tmode, modifier);
10356 else if (TREE_CODE (init) == CONSTRUCTOR)
10358 unsigned HOST_WIDE_INT ix;
10359 tree field, value;
10361 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10362 field, value)
10363 if (tree_int_cst_equal (field, index))
10365 if (TREE_SIDE_EFFECTS (value))
10366 break;
10368 if (TREE_CODE (value) == CONSTRUCTOR)
10370 /* If VALUE is a CONSTRUCTOR, this
10371 optimization is only useful if
10372 this doesn't store the CONSTRUCTOR
10373 into memory. If it does, it is more
10374 efficient to just load the data from
10375 the array directly. */
10376 rtx ret = expand_constructor (value, target,
10377 modifier, true);
10378 if (ret == NULL_RTX)
10379 break;
10382 return
10383 expand_expr (fold (value), target, tmode, modifier);
10386 else if (TREE_CODE (init) == STRING_CST)
10388 tree low_bound = array_ref_low_bound (exp);
10389 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10391 /* Optimize the special case of a zero lower bound.
10393 We convert the lower bound to sizetype to avoid problems
10394 with constant folding. E.g. suppose the lower bound is
10395 1 and its mode is QI. Without the conversion
10396 (ARRAY + (INDEX - (unsigned char)1))
10397 becomes
10398 (ARRAY + (-(unsigned char)1) + INDEX)
10399 which becomes
10400 (ARRAY + 255 + INDEX). Oops! */
10401 if (!integer_zerop (low_bound))
10402 index1 = size_diffop_loc (loc, index1,
10403 fold_convert_loc (loc, sizetype,
10404 low_bound));
10406 if (tree_fits_uhwi_p (index1)
10407 && compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10409 tree type = TREE_TYPE (TREE_TYPE (init));
10410 scalar_int_mode mode;
10412 if (is_int_mode (TYPE_MODE (type), &mode)
10413 && GET_MODE_SIZE (mode) == 1)
10414 return gen_int_mode (TREE_STRING_POINTER (init)
10415 [TREE_INT_CST_LOW (index1)],
10416 mode);
10421 goto normal_inner_ref;
10423 case COMPONENT_REF:
10424 /* If the operand is a CONSTRUCTOR, we can just extract the
10425 appropriate field if it is present. */
10426 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10428 unsigned HOST_WIDE_INT idx;
10429 tree field, value;
10430 scalar_int_mode field_mode;
10432 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10433 idx, field, value)
10434 if (field == treeop1
10435 /* We can normally use the value of the field in the
10436 CONSTRUCTOR. However, if this is a bitfield in
10437 an integral mode that we can fit in a HOST_WIDE_INT,
10438 we must mask only the number of bits in the bitfield,
10439 since this is done implicitly by the constructor. If
10440 the bitfield does not meet either of those conditions,
10441 we can't do this optimization. */
10442 && (! DECL_BIT_FIELD (field)
10443 || (is_int_mode (DECL_MODE (field), &field_mode)
10444 && (GET_MODE_PRECISION (field_mode)
10445 <= HOST_BITS_PER_WIDE_INT))))
10447 if (DECL_BIT_FIELD (field)
10448 && modifier == EXPAND_STACK_PARM)
10449 target = 0;
10450 op0 = expand_expr (value, target, tmode, modifier);
10451 if (DECL_BIT_FIELD (field))
10453 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10454 scalar_int_mode imode
10455 = SCALAR_INT_TYPE_MODE (TREE_TYPE (field));
10457 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10459 op1 = gen_int_mode ((HOST_WIDE_INT_1 << bitsize) - 1,
10460 imode);
10461 op0 = expand_and (imode, op0, op1, target);
10463 else
10465 int count = GET_MODE_PRECISION (imode) - bitsize;
10467 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10468 target, 0);
10469 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10470 target, 0);
10474 return op0;
10477 goto normal_inner_ref;
10479 case BIT_FIELD_REF:
10480 case ARRAY_RANGE_REF:
10481 normal_inner_ref:
10483 machine_mode mode1, mode2;
10484 HOST_WIDE_INT bitsize, bitpos;
10485 tree offset;
10486 int reversep, volatilep = 0, must_force_mem;
10487 tree tem
10488 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10489 &unsignedp, &reversep, &volatilep);
10490 rtx orig_op0, memloc;
10491 bool clear_mem_expr = false;
10493 /* If we got back the original object, something is wrong. Perhaps
10494 we are evaluating an expression too early. In any event, don't
10495 infinitely recurse. */
10496 gcc_assert (tem != exp);
10498 /* If TEM's type is a union of variable size, pass TARGET to the inner
10499 computation, since it will need a temporary and TARGET is known
10500 to have to do. This occurs in unchecked conversion in Ada. */
10501 orig_op0 = op0
10502 = expand_expr_real (tem,
10503 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10504 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10505 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10506 != INTEGER_CST)
10507 && modifier != EXPAND_STACK_PARM
10508 ? target : NULL_RTX),
10509 VOIDmode,
10510 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10511 NULL, true);
10513 /* If the field has a mode, we want to access it in the
10514 field's mode, not the computed mode.
10515 If a MEM has VOIDmode (external with incomplete type),
10516 use BLKmode for it instead. */
10517 if (MEM_P (op0))
10519 if (mode1 != VOIDmode)
10520 op0 = adjust_address (op0, mode1, 0);
10521 else if (GET_MODE (op0) == VOIDmode)
10522 op0 = adjust_address (op0, BLKmode, 0);
10525 mode2
10526 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10528 /* If we have either an offset, a BLKmode result, or a reference
10529 outside the underlying object, we must force it to memory.
10530 Such a case can occur in Ada if we have unchecked conversion
10531 of an expression from a scalar type to an aggregate type or
10532 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10533 passed a partially uninitialized object or a view-conversion
10534 to a larger size. */
10535 must_force_mem = (offset
10536 || mode1 == BLKmode
10537 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10539 /* Handle CONCAT first. */
10540 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10542 if (bitpos == 0
10543 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0))
10544 && COMPLEX_MODE_P (mode1)
10545 && COMPLEX_MODE_P (GET_MODE (op0))
10546 && (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
10547 == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
10549 if (reversep)
10550 op0 = flip_storage_order (GET_MODE (op0), op0);
10551 if (mode1 != GET_MODE (op0))
10553 rtx parts[2];
10554 for (int i = 0; i < 2; i++)
10556 rtx op = read_complex_part (op0, i != 0);
10557 if (GET_CODE (op) == SUBREG)
10558 op = force_reg (GET_MODE (op), op);
10559 rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
10560 op);
10561 if (temp)
10562 op = temp;
10563 else
10565 if (!REG_P (op) && !MEM_P (op))
10566 op = force_reg (GET_MODE (op), op);
10567 op = gen_lowpart (GET_MODE_INNER (mode1), op);
10569 parts[i] = op;
10571 op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
10573 return op0;
10575 if (bitpos == 0
10576 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10577 && bitsize)
10579 op0 = XEXP (op0, 0);
10580 mode2 = GET_MODE (op0);
10582 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10583 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10584 && bitpos
10585 && bitsize)
10587 op0 = XEXP (op0, 1);
10588 bitpos = 0;
10589 mode2 = GET_MODE (op0);
10591 else
10592 /* Otherwise force into memory. */
10593 must_force_mem = 1;
10596 /* If this is a constant, put it in a register if it is a legitimate
10597 constant and we don't need a memory reference. */
10598 if (CONSTANT_P (op0)
10599 && mode2 != BLKmode
10600 && targetm.legitimate_constant_p (mode2, op0)
10601 && !must_force_mem)
10602 op0 = force_reg (mode2, op0);
10604 /* Otherwise, if this is a constant, try to force it to the constant
10605 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10606 is a legitimate constant. */
10607 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10608 op0 = validize_mem (memloc);
10610 /* Otherwise, if this is a constant or the object is not in memory
10611 and need be, put it there. */
10612 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10614 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10615 emit_move_insn (memloc, op0);
10616 op0 = memloc;
10617 clear_mem_expr = true;
10620 if (offset)
10622 machine_mode address_mode;
10623 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10624 EXPAND_SUM);
10626 gcc_assert (MEM_P (op0));
10628 address_mode = get_address_mode (op0);
10629 if (GET_MODE (offset_rtx) != address_mode)
10631 /* We cannot be sure that the RTL in offset_rtx is valid outside
10632 of a memory address context, so force it into a register
10633 before attempting to convert it to the desired mode. */
10634 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10635 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10638 /* See the comment in expand_assignment for the rationale. */
10639 if (mode1 != VOIDmode
10640 && bitpos != 0
10641 && bitsize > 0
10642 && (bitpos % bitsize) == 0
10643 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10644 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10646 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10647 bitpos = 0;
10650 op0 = offset_address (op0, offset_rtx,
10651 highest_pow2_factor (offset));
10654 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10655 record its alignment as BIGGEST_ALIGNMENT. */
10656 if (MEM_P (op0) && bitpos == 0 && offset != 0
10657 && is_aligning_offset (offset, tem))
10658 set_mem_align (op0, BIGGEST_ALIGNMENT);
10660 /* Don't forget about volatility even if this is a bitfield. */
10661 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10663 if (op0 == orig_op0)
10664 op0 = copy_rtx (op0);
10666 MEM_VOLATILE_P (op0) = 1;
10669 /* In cases where an aligned union has an unaligned object
10670 as a field, we might be extracting a BLKmode value from
10671 an integer-mode (e.g., SImode) object. Handle this case
10672 by doing the extract into an object as wide as the field
10673 (which we know to be the width of a basic mode), then
10674 storing into memory, and changing the mode to BLKmode. */
10675 if (mode1 == VOIDmode
10676 || REG_P (op0) || GET_CODE (op0) == SUBREG
10677 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10678 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10679 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10680 && modifier != EXPAND_CONST_ADDRESS
10681 && modifier != EXPAND_INITIALIZER
10682 && modifier != EXPAND_MEMORY)
10683 /* If the bitfield is volatile and the bitsize
10684 is narrower than the access size of the bitfield,
10685 we need to extract bitfields from the access. */
10686 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10687 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10688 && mode1 != BLKmode
10689 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10690 /* If the field isn't aligned enough to fetch as a memref,
10691 fetch it as a bit field. */
10692 || (mode1 != BLKmode
10693 && (((MEM_P (op0)
10694 ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10695 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0)
10696 : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10697 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
10698 && modifier != EXPAND_MEMORY
10699 && ((modifier == EXPAND_CONST_ADDRESS
10700 || modifier == EXPAND_INITIALIZER)
10701 ? STRICT_ALIGNMENT
10702 : targetm.slow_unaligned_access (mode1,
10703 MEM_ALIGN (op0))))
10704 || (bitpos % BITS_PER_UNIT != 0)))
10705 /* If the type and the field are a constant size and the
10706 size of the type isn't the same size as the bitfield,
10707 we must use bitfield operations. */
10708 || (bitsize >= 0
10709 && TYPE_SIZE (TREE_TYPE (exp))
10710 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10711 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10712 bitsize) != 0))
10714 machine_mode ext_mode = mode;
10716 if (ext_mode == BLKmode
10717 && ! (target != 0 && MEM_P (op0)
10718 && MEM_P (target)
10719 && bitpos % BITS_PER_UNIT == 0))
10720 ext_mode = int_mode_for_size (bitsize, 1).else_blk ();
10722 if (ext_mode == BLKmode)
10724 if (target == 0)
10725 target = assign_temp (type, 1, 1);
10727 /* ??? Unlike the similar test a few lines below, this one is
10728 very likely obsolete. */
10729 if (bitsize == 0)
10730 return target;
10732 /* In this case, BITPOS must start at a byte boundary and
10733 TARGET, if specified, must be a MEM. */
10734 gcc_assert (MEM_P (op0)
10735 && (!target || MEM_P (target))
10736 && !(bitpos % BITS_PER_UNIT));
10738 emit_block_move (target,
10739 adjust_address (op0, VOIDmode,
10740 bitpos / BITS_PER_UNIT),
10741 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10742 / BITS_PER_UNIT),
10743 (modifier == EXPAND_STACK_PARM
10744 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10746 return target;
10749 /* If we have nothing to extract, the result will be 0 for targets
10750 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10751 return 0 for the sake of consistency, as reading a zero-sized
10752 bitfield is valid in Ada and the value is fully specified. */
10753 if (bitsize == 0)
10754 return const0_rtx;
10756 op0 = validize_mem (op0);
10758 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10759 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10761 /* If the result has a record type and the extraction is done in
10762 an integral mode, then the field may be not aligned on a byte
10763 boundary; in this case, if it has reverse storage order, it
10764 needs to be extracted as a scalar field with reverse storage
10765 order and put back into memory order afterwards. */
10766 if (TREE_CODE (type) == RECORD_TYPE
10767 && GET_MODE_CLASS (ext_mode) == MODE_INT)
10768 reversep = TYPE_REVERSE_STORAGE_ORDER (type);
10770 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10771 (modifier == EXPAND_STACK_PARM
10772 ? NULL_RTX : target),
10773 ext_mode, ext_mode, reversep, alt_rtl);
10775 /* If the result has a record type and the mode of OP0 is an
10776 integral mode then, if BITSIZE is narrower than this mode
10777 and this is for big-endian data, we must put the field
10778 into the high-order bits. And we must also put it back
10779 into memory order if it has been previously reversed. */
10780 scalar_int_mode op0_mode;
10781 if (TREE_CODE (type) == RECORD_TYPE
10782 && is_int_mode (GET_MODE (op0), &op0_mode))
10784 HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode);
10786 if (bitsize < size
10787 && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
10788 op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0,
10789 size - bitsize, op0, 1);
10791 if (reversep)
10792 op0 = flip_storage_order (op0_mode, op0);
10795 /* If the result type is BLKmode, store the data into a temporary
10796 of the appropriate type, but with the mode corresponding to the
10797 mode for the data we have (op0's mode). */
10798 if (mode == BLKmode)
10800 rtx new_rtx
10801 = assign_stack_temp_for_type (ext_mode,
10802 GET_MODE_BITSIZE (ext_mode),
10803 type);
10804 emit_move_insn (new_rtx, op0);
10805 op0 = copy_rtx (new_rtx);
10806 PUT_MODE (op0, BLKmode);
10809 return op0;
10812 /* If the result is BLKmode, use that to access the object
10813 now as well. */
10814 if (mode == BLKmode)
10815 mode1 = BLKmode;
10817 /* Get a reference to just this component. */
10818 if (modifier == EXPAND_CONST_ADDRESS
10819 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10820 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10821 else
10822 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10824 if (op0 == orig_op0)
10825 op0 = copy_rtx (op0);
10827 /* Don't set memory attributes if the base expression is
10828 SSA_NAME that got expanded as a MEM. In that case, we should
10829 just honor its original memory attributes. */
10830 if (TREE_CODE (tem) != SSA_NAME || !MEM_P (orig_op0))
10831 set_mem_attributes (op0, exp, 0);
10833 if (REG_P (XEXP (op0, 0)))
10834 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10836 /* If op0 is a temporary because the original expressions was forced
10837 to memory, clear MEM_EXPR so that the original expression cannot
10838 be marked as addressable through MEM_EXPR of the temporary. */
10839 if (clear_mem_expr)
10840 set_mem_expr (op0, NULL_TREE);
10842 MEM_VOLATILE_P (op0) |= volatilep;
10844 if (reversep
10845 && modifier != EXPAND_MEMORY
10846 && modifier != EXPAND_WRITE)
10847 op0 = flip_storage_order (mode1, op0);
10849 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10850 || modifier == EXPAND_CONST_ADDRESS
10851 || modifier == EXPAND_INITIALIZER)
10852 return op0;
10854 if (target == 0)
10855 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10857 convert_move (target, op0, unsignedp);
10858 return target;
10861 case OBJ_TYPE_REF:
10862 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10864 case CALL_EXPR:
10865 /* All valid uses of __builtin_va_arg_pack () are removed during
10866 inlining. */
10867 if (CALL_EXPR_VA_ARG_PACK (exp))
10868 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10870 tree fndecl = get_callee_fndecl (exp), attr;
10872 if (fndecl
10873 && (attr = lookup_attribute ("error",
10874 DECL_ATTRIBUTES (fndecl))) != NULL)
10875 error ("%Kcall to %qs declared with attribute error: %s",
10876 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10877 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10878 if (fndecl
10879 && (attr = lookup_attribute ("warning",
10880 DECL_ATTRIBUTES (fndecl))) != NULL)
10881 warning_at (tree_nonartificial_location (exp),
10882 0, "%Kcall to %qs declared with attribute warning: %s",
10883 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10884 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10886 /* Check for a built-in function. */
10887 if (fndecl && DECL_BUILT_IN (fndecl))
10889 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10890 if (CALL_WITH_BOUNDS_P (exp))
10891 return expand_builtin_with_bounds (exp, target, subtarget,
10892 tmode, ignore);
10893 else
10894 return expand_builtin (exp, target, subtarget, tmode, ignore);
10897 return expand_call (exp, target, ignore);
10899 case VIEW_CONVERT_EXPR:
10900 op0 = NULL_RTX;
10902 /* If we are converting to BLKmode, try to avoid an intermediate
10903 temporary by fetching an inner memory reference. */
10904 if (mode == BLKmode
10905 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10906 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10907 && handled_component_p (treeop0))
10909 machine_mode mode1;
10910 HOST_WIDE_INT bitsize, bitpos;
10911 tree offset;
10912 int unsignedp, reversep, volatilep = 0;
10913 tree tem
10914 = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
10915 &unsignedp, &reversep, &volatilep);
10916 rtx orig_op0;
10918 /* ??? We should work harder and deal with non-zero offsets. */
10919 if (!offset
10920 && (bitpos % BITS_PER_UNIT) == 0
10921 && !reversep
10922 && bitsize >= 0
10923 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10925 /* See the normal_inner_ref case for the rationale. */
10926 orig_op0
10927 = expand_expr_real (tem,
10928 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10929 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10930 != INTEGER_CST)
10931 && modifier != EXPAND_STACK_PARM
10932 ? target : NULL_RTX),
10933 VOIDmode,
10934 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10935 NULL, true);
10937 if (MEM_P (orig_op0))
10939 op0 = orig_op0;
10941 /* Get a reference to just this component. */
10942 if (modifier == EXPAND_CONST_ADDRESS
10943 || modifier == EXPAND_SUM
10944 || modifier == EXPAND_INITIALIZER)
10945 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10946 else
10947 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10949 if (op0 == orig_op0)
10950 op0 = copy_rtx (op0);
10952 set_mem_attributes (op0, treeop0, 0);
10953 if (REG_P (XEXP (op0, 0)))
10954 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10956 MEM_VOLATILE_P (op0) |= volatilep;
10961 if (!op0)
10962 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10963 NULL, inner_reference_p);
10965 /* If the input and output modes are both the same, we are done. */
10966 if (mode == GET_MODE (op0))
10968 /* If neither mode is BLKmode, and both modes are the same size
10969 then we can use gen_lowpart. */
10970 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10971 && (GET_MODE_PRECISION (mode)
10972 == GET_MODE_PRECISION (GET_MODE (op0)))
10973 && !COMPLEX_MODE_P (GET_MODE (op0)))
10975 if (GET_CODE (op0) == SUBREG)
10976 op0 = force_reg (GET_MODE (op0), op0);
10977 temp = gen_lowpart_common (mode, op0);
10978 if (temp)
10979 op0 = temp;
10980 else
10982 if (!REG_P (op0) && !MEM_P (op0))
10983 op0 = force_reg (GET_MODE (op0), op0);
10984 op0 = gen_lowpart (mode, op0);
10987 /* If both types are integral, convert from one mode to the other. */
10988 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10989 op0 = convert_modes (mode, GET_MODE (op0), op0,
10990 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10991 /* If the output type is a bit-field type, do an extraction. */
10992 else if (reduce_bit_field)
10993 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10994 TYPE_UNSIGNED (type), NULL_RTX,
10995 mode, mode, false, NULL);
10996 /* As a last resort, spill op0 to memory, and reload it in a
10997 different mode. */
10998 else if (!MEM_P (op0))
11000 /* If the operand is not a MEM, force it into memory. Since we
11001 are going to be changing the mode of the MEM, don't call
11002 force_const_mem for constants because we don't allow pool
11003 constants to change mode. */
11004 tree inner_type = TREE_TYPE (treeop0);
11006 gcc_assert (!TREE_ADDRESSABLE (exp));
11008 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
11009 target
11010 = assign_stack_temp_for_type
11011 (TYPE_MODE (inner_type),
11012 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
11014 emit_move_insn (target, op0);
11015 op0 = target;
11018 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
11019 output type is such that the operand is known to be aligned, indicate
11020 that it is. Otherwise, we need only be concerned about alignment for
11021 non-BLKmode results. */
11022 if (MEM_P (op0))
11024 enum insn_code icode;
11026 if (modifier != EXPAND_WRITE
11027 && modifier != EXPAND_MEMORY
11028 && !inner_reference_p
11029 && mode != BLKmode
11030 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
11032 /* If the target does have special handling for unaligned
11033 loads of mode then use them. */
11034 if ((icode = optab_handler (movmisalign_optab, mode))
11035 != CODE_FOR_nothing)
11037 rtx reg;
11039 op0 = adjust_address (op0, mode, 0);
11040 /* We've already validated the memory, and we're creating a
11041 new pseudo destination. The predicates really can't
11042 fail. */
11043 reg = gen_reg_rtx (mode);
11045 /* Nor can the insn generator. */
11046 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
11047 emit_insn (insn);
11048 return reg;
11050 else if (STRICT_ALIGNMENT)
11052 tree inner_type = TREE_TYPE (treeop0);
11053 HOST_WIDE_INT temp_size
11054 = MAX (int_size_in_bytes (inner_type),
11055 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
11056 rtx new_rtx
11057 = assign_stack_temp_for_type (mode, temp_size, type);
11058 rtx new_with_op0_mode
11059 = adjust_address (new_rtx, GET_MODE (op0), 0);
11061 gcc_assert (!TREE_ADDRESSABLE (exp));
11063 if (GET_MODE (op0) == BLKmode)
11064 emit_block_move (new_with_op0_mode, op0,
11065 GEN_INT (GET_MODE_SIZE (mode)),
11066 (modifier == EXPAND_STACK_PARM
11067 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
11068 else
11069 emit_move_insn (new_with_op0_mode, op0);
11071 op0 = new_rtx;
11075 op0 = adjust_address (op0, mode, 0);
11078 return op0;
11080 case MODIFY_EXPR:
11082 tree lhs = treeop0;
11083 tree rhs = treeop1;
11084 gcc_assert (ignore);
11086 /* Check for |= or &= of a bitfield of size one into another bitfield
11087 of size 1. In this case, (unless we need the result of the
11088 assignment) we can do this more efficiently with a
11089 test followed by an assignment, if necessary.
11091 ??? At this point, we can't get a BIT_FIELD_REF here. But if
11092 things change so we do, this code should be enhanced to
11093 support it. */
11094 if (TREE_CODE (lhs) == COMPONENT_REF
11095 && (TREE_CODE (rhs) == BIT_IOR_EXPR
11096 || TREE_CODE (rhs) == BIT_AND_EXPR)
11097 && TREE_OPERAND (rhs, 0) == lhs
11098 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
11099 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
11100 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
11102 rtx_code_label *label = gen_label_rtx ();
11103 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
11104 do_jump (TREE_OPERAND (rhs, 1),
11105 value ? label : 0,
11106 value ? 0 : label,
11107 profile_probability::uninitialized ());
11108 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
11109 false);
11110 do_pending_stack_adjust ();
11111 emit_label (label);
11112 return const0_rtx;
11115 expand_assignment (lhs, rhs, false);
11116 return const0_rtx;
11119 case ADDR_EXPR:
11120 return expand_expr_addr_expr (exp, target, tmode, modifier);
11122 case REALPART_EXPR:
11123 op0 = expand_normal (treeop0);
11124 return read_complex_part (op0, false);
11126 case IMAGPART_EXPR:
11127 op0 = expand_normal (treeop0);
11128 return read_complex_part (op0, true);
11130 case RETURN_EXPR:
11131 case LABEL_EXPR:
11132 case GOTO_EXPR:
11133 case SWITCH_EXPR:
11134 case ASM_EXPR:
11135 /* Expanded in cfgexpand.c. */
11136 gcc_unreachable ();
11138 case TRY_CATCH_EXPR:
11139 case CATCH_EXPR:
11140 case EH_FILTER_EXPR:
11141 case TRY_FINALLY_EXPR:
11142 /* Lowered by tree-eh.c. */
11143 gcc_unreachable ();
11145 case WITH_CLEANUP_EXPR:
11146 case CLEANUP_POINT_EXPR:
11147 case TARGET_EXPR:
11148 case CASE_LABEL_EXPR:
11149 case VA_ARG_EXPR:
11150 case BIND_EXPR:
11151 case INIT_EXPR:
11152 case CONJ_EXPR:
11153 case COMPOUND_EXPR:
11154 case PREINCREMENT_EXPR:
11155 case PREDECREMENT_EXPR:
11156 case POSTINCREMENT_EXPR:
11157 case POSTDECREMENT_EXPR:
11158 case LOOP_EXPR:
11159 case EXIT_EXPR:
11160 case COMPOUND_LITERAL_EXPR:
11161 /* Lowered by gimplify.c. */
11162 gcc_unreachable ();
11164 case FDESC_EXPR:
11165 /* Function descriptors are not valid except for as
11166 initialization constants, and should not be expanded. */
11167 gcc_unreachable ();
11169 case WITH_SIZE_EXPR:
11170 /* WITH_SIZE_EXPR expands to its first argument. The caller should
11171 have pulled out the size to use in whatever context it needed. */
11172 return expand_expr_real (treeop0, original_target, tmode,
11173 modifier, alt_rtl, inner_reference_p);
11175 default:
11176 return expand_expr_real_2 (&ops, target, tmode, modifier);
11180 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
11181 signedness of TYPE), possibly returning the result in TARGET.
11182 TYPE is known to be a partial integer type. */
11183 static rtx
11184 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
11186 HOST_WIDE_INT prec = TYPE_PRECISION (type);
11187 if (target && GET_MODE (target) != GET_MODE (exp))
11188 target = 0;
11189 /* For constant values, reduce using build_int_cst_type. */
11190 if (CONST_INT_P (exp))
11192 HOST_WIDE_INT value = INTVAL (exp);
11193 tree t = build_int_cst_type (type, value);
11194 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
11196 else if (TYPE_UNSIGNED (type))
11198 scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
11199 rtx mask = immed_wide_int_const
11200 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
11201 return expand_and (mode, exp, mask, target);
11203 else
11205 scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
11206 int count = GET_MODE_PRECISION (mode) - prec;
11207 exp = expand_shift (LSHIFT_EXPR, mode, exp, count, target, 0);
11208 return expand_shift (RSHIFT_EXPR, mode, exp, count, target, 0);
11212 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
11213 when applied to the address of EXP produces an address known to be
11214 aligned more than BIGGEST_ALIGNMENT. */
11216 static int
11217 is_aligning_offset (const_tree offset, const_tree exp)
11219 /* Strip off any conversions. */
11220 while (CONVERT_EXPR_P (offset))
11221 offset = TREE_OPERAND (offset, 0);
11223 /* We must now have a BIT_AND_EXPR with a constant that is one less than
11224 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
11225 if (TREE_CODE (offset) != BIT_AND_EXPR
11226 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
11227 || compare_tree_int (TREE_OPERAND (offset, 1),
11228 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
11229 || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
11230 return 0;
11232 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
11233 It must be NEGATE_EXPR. Then strip any more conversions. */
11234 offset = TREE_OPERAND (offset, 0);
11235 while (CONVERT_EXPR_P (offset))
11236 offset = TREE_OPERAND (offset, 0);
11238 if (TREE_CODE (offset) != NEGATE_EXPR)
11239 return 0;
11241 offset = TREE_OPERAND (offset, 0);
11242 while (CONVERT_EXPR_P (offset))
11243 offset = TREE_OPERAND (offset, 0);
11245 /* This must now be the address of EXP. */
11246 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
11249 /* Return the tree node if an ARG corresponds to a string constant or zero
11250 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
11251 in bytes within the string that ARG is accessing. The type of the
11252 offset will be `sizetype'. */
11254 tree
11255 string_constant (tree arg, tree *ptr_offset)
11257 tree array, offset, lower_bound;
11258 STRIP_NOPS (arg);
11260 if (TREE_CODE (arg) == ADDR_EXPR)
11262 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
11264 *ptr_offset = size_zero_node;
11265 return TREE_OPERAND (arg, 0);
11267 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
11269 array = TREE_OPERAND (arg, 0);
11270 offset = size_zero_node;
11272 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
11274 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
11275 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11276 if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
11277 return 0;
11279 /* Check if the array has a nonzero lower bound. */
11280 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
11281 if (!integer_zerop (lower_bound))
11283 /* If the offset and base aren't both constants, return 0. */
11284 if (TREE_CODE (lower_bound) != INTEGER_CST)
11285 return 0;
11286 if (TREE_CODE (offset) != INTEGER_CST)
11287 return 0;
11288 /* Adjust offset by the lower bound. */
11289 offset = size_diffop (fold_convert (sizetype, offset),
11290 fold_convert (sizetype, lower_bound));
11293 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
11295 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
11296 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11297 if (TREE_CODE (array) != ADDR_EXPR)
11298 return 0;
11299 array = TREE_OPERAND (array, 0);
11300 if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
11301 return 0;
11303 else
11304 return 0;
11306 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
11308 tree arg0 = TREE_OPERAND (arg, 0);
11309 tree arg1 = TREE_OPERAND (arg, 1);
11311 STRIP_NOPS (arg0);
11312 STRIP_NOPS (arg1);
11314 if (TREE_CODE (arg0) == ADDR_EXPR
11315 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
11316 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
11318 array = TREE_OPERAND (arg0, 0);
11319 offset = arg1;
11321 else if (TREE_CODE (arg1) == ADDR_EXPR
11322 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
11323 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
11325 array = TREE_OPERAND (arg1, 0);
11326 offset = arg0;
11328 else
11329 return 0;
11331 else
11332 return 0;
11334 if (TREE_CODE (array) == STRING_CST)
11336 *ptr_offset = fold_convert (sizetype, offset);
11337 return array;
11339 else if (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
11341 int length;
11342 tree init = ctor_for_folding (array);
11344 /* Variables initialized to string literals can be handled too. */
11345 if (init == error_mark_node
11346 || !init
11347 || TREE_CODE (init) != STRING_CST)
11348 return 0;
11350 /* Avoid const char foo[4] = "abcde"; */
11351 if (DECL_SIZE_UNIT (array) == NULL_TREE
11352 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
11353 || (length = TREE_STRING_LENGTH (init)) <= 0
11354 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
11355 return 0;
11357 /* If variable is bigger than the string literal, OFFSET must be constant
11358 and inside of the bounds of the string literal. */
11359 offset = fold_convert (sizetype, offset);
11360 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
11361 && (! tree_fits_uhwi_p (offset)
11362 || compare_tree_int (offset, length) >= 0))
11363 return 0;
11365 *ptr_offset = offset;
11366 return init;
11369 return 0;
11372 /* Generate code to calculate OPS, and exploded expression
11373 using a store-flag instruction and return an rtx for the result.
11374 OPS reflects a comparison.
11376 If TARGET is nonzero, store the result there if convenient.
11378 Return zero if there is no suitable set-flag instruction
11379 available on this machine.
11381 Once expand_expr has been called on the arguments of the comparison,
11382 we are committed to doing the store flag, since it is not safe to
11383 re-evaluate the expression. We emit the store-flag insn by calling
11384 emit_store_flag, but only expand the arguments if we have a reason
11385 to believe that emit_store_flag will be successful. If we think that
11386 it will, but it isn't, we have to simulate the store-flag with a
11387 set/jump/set sequence. */
11389 static rtx
11390 do_store_flag (sepops ops, rtx target, machine_mode mode)
11392 enum rtx_code code;
11393 tree arg0, arg1, type;
11394 machine_mode operand_mode;
11395 int unsignedp;
11396 rtx op0, op1;
11397 rtx subtarget = target;
11398 location_t loc = ops->location;
11400 arg0 = ops->op0;
11401 arg1 = ops->op1;
11403 /* Don't crash if the comparison was erroneous. */
11404 if (arg0 == error_mark_node || arg1 == error_mark_node)
11405 return const0_rtx;
11407 type = TREE_TYPE (arg0);
11408 operand_mode = TYPE_MODE (type);
11409 unsignedp = TYPE_UNSIGNED (type);
11411 /* We won't bother with BLKmode store-flag operations because it would mean
11412 passing a lot of information to emit_store_flag. */
11413 if (operand_mode == BLKmode)
11414 return 0;
11416 /* We won't bother with store-flag operations involving function pointers
11417 when function pointers must be canonicalized before comparisons. */
11418 if (targetm.have_canonicalize_funcptr_for_compare ()
11419 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11420 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11421 == FUNCTION_TYPE))
11422 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11423 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11424 == FUNCTION_TYPE))))
11425 return 0;
11427 STRIP_NOPS (arg0);
11428 STRIP_NOPS (arg1);
11430 /* For vector typed comparisons emit code to generate the desired
11431 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11432 expander for this. */
11433 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11435 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11436 if (VECTOR_BOOLEAN_TYPE_P (ops->type)
11437 && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
11438 return expand_vec_cmp_expr (ops->type, ifexp, target);
11439 else
11441 tree if_true = constant_boolean_node (true, ops->type);
11442 tree if_false = constant_boolean_node (false, ops->type);
11443 return expand_vec_cond_expr (ops->type, ifexp, if_true,
11444 if_false, target);
11448 /* Get the rtx comparison code to use. We know that EXP is a comparison
11449 operation of some type. Some comparisons against 1 and -1 can be
11450 converted to comparisons with zero. Do so here so that the tests
11451 below will be aware that we have a comparison with zero. These
11452 tests will not catch constants in the first operand, but constants
11453 are rarely passed as the first operand. */
11455 switch (ops->code)
11457 case EQ_EXPR:
11458 code = EQ;
11459 break;
11460 case NE_EXPR:
11461 code = NE;
11462 break;
11463 case LT_EXPR:
11464 if (integer_onep (arg1))
11465 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11466 else
11467 code = unsignedp ? LTU : LT;
11468 break;
11469 case LE_EXPR:
11470 if (! unsignedp && integer_all_onesp (arg1))
11471 arg1 = integer_zero_node, code = LT;
11472 else
11473 code = unsignedp ? LEU : LE;
11474 break;
11475 case GT_EXPR:
11476 if (! unsignedp && integer_all_onesp (arg1))
11477 arg1 = integer_zero_node, code = GE;
11478 else
11479 code = unsignedp ? GTU : GT;
11480 break;
11481 case GE_EXPR:
11482 if (integer_onep (arg1))
11483 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11484 else
11485 code = unsignedp ? GEU : GE;
11486 break;
11488 case UNORDERED_EXPR:
11489 code = UNORDERED;
11490 break;
11491 case ORDERED_EXPR:
11492 code = ORDERED;
11493 break;
11494 case UNLT_EXPR:
11495 code = UNLT;
11496 break;
11497 case UNLE_EXPR:
11498 code = UNLE;
11499 break;
11500 case UNGT_EXPR:
11501 code = UNGT;
11502 break;
11503 case UNGE_EXPR:
11504 code = UNGE;
11505 break;
11506 case UNEQ_EXPR:
11507 code = UNEQ;
11508 break;
11509 case LTGT_EXPR:
11510 code = LTGT;
11511 break;
11513 default:
11514 gcc_unreachable ();
11517 /* Put a constant second. */
11518 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11519 || TREE_CODE (arg0) == FIXED_CST)
11521 std::swap (arg0, arg1);
11522 code = swap_condition (code);
11525 /* If this is an equality or inequality test of a single bit, we can
11526 do this by shifting the bit being tested to the low-order bit and
11527 masking the result with the constant 1. If the condition was EQ,
11528 we xor it with 1. This does not require an scc insn and is faster
11529 than an scc insn even if we have it.
11531 The code to make this transformation was moved into fold_single_bit_test,
11532 so we just call into the folder and expand its result. */
11534 if ((code == NE || code == EQ)
11535 && integer_zerop (arg1)
11536 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11538 gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11539 if (srcstmt
11540 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11542 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11543 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11544 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11545 gimple_assign_rhs1 (srcstmt),
11546 gimple_assign_rhs2 (srcstmt));
11547 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11548 if (temp)
11549 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11553 if (! get_subtarget (target)
11554 || GET_MODE (subtarget) != operand_mode)
11555 subtarget = 0;
11557 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11559 if (target == 0)
11560 target = gen_reg_rtx (mode);
11562 /* Try a cstore if possible. */
11563 return emit_store_flag_force (target, code, op0, op1,
11564 operand_mode, unsignedp,
11565 (TYPE_PRECISION (ops->type) == 1
11566 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11569 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11570 0 otherwise (i.e. if there is no casesi instruction).
11572 DEFAULT_PROBABILITY is the probability of jumping to the default
11573 label. */
11575 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11576 rtx table_label, rtx default_label, rtx fallback_label,
11577 profile_probability default_probability)
11579 struct expand_operand ops[5];
11580 scalar_int_mode index_mode = SImode;
11581 rtx op1, op2, index;
11583 if (! targetm.have_casesi ())
11584 return 0;
11586 /* The index must be some form of integer. Convert it to SImode. */
11587 scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
11588 if (GET_MODE_BITSIZE (omode) > GET_MODE_BITSIZE (index_mode))
11590 rtx rangertx = expand_normal (range);
11592 /* We must handle the endpoints in the original mode. */
11593 index_expr = build2 (MINUS_EXPR, index_type,
11594 index_expr, minval);
11595 minval = integer_zero_node;
11596 index = expand_normal (index_expr);
11597 if (default_label)
11598 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11599 omode, 1, default_label,
11600 default_probability);
11601 /* Now we can safely truncate. */
11602 index = convert_to_mode (index_mode, index, 0);
11604 else
11606 if (omode != index_mode)
11608 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11609 index_expr = fold_convert (index_type, index_expr);
11612 index = expand_normal (index_expr);
11615 do_pending_stack_adjust ();
11617 op1 = expand_normal (minval);
11618 op2 = expand_normal (range);
11620 create_input_operand (&ops[0], index, index_mode);
11621 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11622 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11623 create_fixed_operand (&ops[3], table_label);
11624 create_fixed_operand (&ops[4], (default_label
11625 ? default_label
11626 : fallback_label));
11627 expand_jump_insn (targetm.code_for_casesi, 5, ops);
11628 return 1;
11631 /* Attempt to generate a tablejump instruction; same concept. */
11632 /* Subroutine of the next function.
11634 INDEX is the value being switched on, with the lowest value
11635 in the table already subtracted.
11636 MODE is its expected mode (needed if INDEX is constant).
11637 RANGE is the length of the jump table.
11638 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11640 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11641 index value is out of range.
11642 DEFAULT_PROBABILITY is the probability of jumping to
11643 the default label. */
11645 static void
11646 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11647 rtx default_label, profile_probability default_probability)
11649 rtx temp, vector;
11651 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11652 cfun->cfg->max_jumptable_ents = INTVAL (range);
11654 /* Do an unsigned comparison (in the proper mode) between the index
11655 expression and the value which represents the length of the range.
11656 Since we just finished subtracting the lower bound of the range
11657 from the index expression, this comparison allows us to simultaneously
11658 check that the original index expression value is both greater than
11659 or equal to the minimum value of the range and less than or equal to
11660 the maximum value of the range. */
11662 if (default_label)
11663 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11664 default_label, default_probability);
11667 /* If index is in range, it must fit in Pmode.
11668 Convert to Pmode so we can index with it. */
11669 if (mode != Pmode)
11670 index = convert_to_mode (Pmode, index, 1);
11672 /* Don't let a MEM slip through, because then INDEX that comes
11673 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11674 and break_out_memory_refs will go to work on it and mess it up. */
11675 #ifdef PIC_CASE_VECTOR_ADDRESS
11676 if (flag_pic && !REG_P (index))
11677 index = copy_to_mode_reg (Pmode, index);
11678 #endif
11680 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11681 GET_MODE_SIZE, because this indicates how large insns are. The other
11682 uses should all be Pmode, because they are addresses. This code
11683 could fail if addresses and insns are not the same size. */
11684 index = simplify_gen_binary (MULT, Pmode, index,
11685 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11686 Pmode));
11687 index = simplify_gen_binary (PLUS, Pmode, index,
11688 gen_rtx_LABEL_REF (Pmode, table_label));
11690 #ifdef PIC_CASE_VECTOR_ADDRESS
11691 if (flag_pic)
11692 index = PIC_CASE_VECTOR_ADDRESS (index);
11693 else
11694 #endif
11695 index = memory_address (CASE_VECTOR_MODE, index);
11696 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11697 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11698 convert_move (temp, vector, 0);
11700 emit_jump_insn (targetm.gen_tablejump (temp, table_label));
11702 /* If we are generating PIC code or if the table is PC-relative, the
11703 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11704 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11705 emit_barrier ();
11709 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11710 rtx table_label, rtx default_label,
11711 profile_probability default_probability)
11713 rtx index;
11715 if (! targetm.have_tablejump ())
11716 return 0;
11718 index_expr = fold_build2 (MINUS_EXPR, index_type,
11719 fold_convert (index_type, index_expr),
11720 fold_convert (index_type, minval));
11721 index = expand_normal (index_expr);
11722 do_pending_stack_adjust ();
11724 do_tablejump (index, TYPE_MODE (index_type),
11725 convert_modes (TYPE_MODE (index_type),
11726 TYPE_MODE (TREE_TYPE (range)),
11727 expand_normal (range),
11728 TYPE_UNSIGNED (TREE_TYPE (range))),
11729 table_label, default_label, default_probability);
11730 return 1;
11733 /* Return a CONST_VECTOR rtx representing vector mask for
11734 a VECTOR_CST of booleans. */
11735 static rtx
11736 const_vector_mask_from_tree (tree exp)
11738 rtvec v;
11739 unsigned i, units;
11740 tree elt;
11741 machine_mode inner, mode;
11743 mode = TYPE_MODE (TREE_TYPE (exp));
11744 units = VECTOR_CST_NELTS (exp);
11745 inner = GET_MODE_INNER (mode);
11747 v = rtvec_alloc (units);
11749 for (i = 0; i < units; ++i)
11751 elt = VECTOR_CST_ELT (exp, i);
11753 gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11754 if (integer_zerop (elt))
11755 RTVEC_ELT (v, i) = CONST0_RTX (inner);
11756 else if (integer_onep (elt)
11757 || integer_minus_onep (elt))
11758 RTVEC_ELT (v, i) = CONSTM1_RTX (inner);
11759 else
11760 gcc_unreachable ();
11763 return gen_rtx_CONST_VECTOR (mode, v);
11766 /* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones.
11767 Return a constant scalar rtx of mode MODE in which bit X is set if element
11768 X of EXP is nonzero. */
11769 static rtx
11770 const_scalar_mask_from_tree (scalar_int_mode mode, tree exp)
11772 wide_int res = wi::zero (GET_MODE_PRECISION (mode));
11773 tree elt;
11774 unsigned i;
11776 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11778 elt = VECTOR_CST_ELT (exp, i);
11779 gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11780 if (integer_all_onesp (elt))
11781 res = wi::set_bit (res, i);
11782 else
11783 gcc_assert (integer_zerop (elt));
11786 return immed_wide_int_const (res, mode);
11789 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11790 static rtx
11791 const_vector_from_tree (tree exp)
11793 rtvec v;
11794 unsigned i, units;
11795 tree elt;
11796 machine_mode inner, mode;
11798 mode = TYPE_MODE (TREE_TYPE (exp));
11800 if (initializer_zerop (exp))
11801 return CONST0_RTX (mode);
11803 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
11804 return const_vector_mask_from_tree (exp);
11806 units = VECTOR_CST_NELTS (exp);
11807 inner = GET_MODE_INNER (mode);
11809 v = rtvec_alloc (units);
11811 for (i = 0; i < units; ++i)
11813 elt = VECTOR_CST_ELT (exp, i);
11815 if (TREE_CODE (elt) == REAL_CST)
11816 RTVEC_ELT (v, i) = const_double_from_real_value (TREE_REAL_CST (elt),
11817 inner);
11818 else if (TREE_CODE (elt) == FIXED_CST)
11819 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11820 inner);
11821 else
11822 RTVEC_ELT (v, i) = immed_wide_int_const (wi::to_poly_wide (elt),
11823 inner);
11826 return gen_rtx_CONST_VECTOR (mode, v);
11829 /* Build a decl for a personality function given a language prefix. */
11831 tree
11832 build_personality_function (const char *lang)
11834 const char *unwind_and_version;
11835 tree decl, type;
11836 char *name;
11838 switch (targetm_common.except_unwind_info (&global_options))
11840 case UI_NONE:
11841 return NULL;
11842 case UI_SJLJ:
11843 unwind_and_version = "_sj0";
11844 break;
11845 case UI_DWARF2:
11846 case UI_TARGET:
11847 unwind_and_version = "_v0";
11848 break;
11849 case UI_SEH:
11850 unwind_and_version = "_seh0";
11851 break;
11852 default:
11853 gcc_unreachable ();
11856 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11858 type = build_function_type_list (integer_type_node, integer_type_node,
11859 long_long_unsigned_type_node,
11860 ptr_type_node, ptr_type_node, NULL_TREE);
11861 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11862 get_identifier (name), type);
11863 DECL_ARTIFICIAL (decl) = 1;
11864 DECL_EXTERNAL (decl) = 1;
11865 TREE_PUBLIC (decl) = 1;
11867 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11868 are the flags assigned by targetm.encode_section_info. */
11869 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11871 return decl;
11874 /* Extracts the personality function of DECL and returns the corresponding
11875 libfunc. */
11878 get_personality_function (tree decl)
11880 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11881 enum eh_personality_kind pk;
11883 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11884 if (pk == eh_personality_none)
11885 return NULL;
11887 if (!personality
11888 && pk == eh_personality_any)
11889 personality = lang_hooks.eh_personality ();
11891 if (pk == eh_personality_lang)
11892 gcc_assert (personality != NULL_TREE);
11894 return XEXP (DECL_RTL (personality), 0);
11897 /* Returns a tree for the size of EXP in bytes. */
11899 static tree
11900 tree_expr_size (const_tree exp)
11902 if (DECL_P (exp)
11903 && DECL_SIZE_UNIT (exp) != 0)
11904 return DECL_SIZE_UNIT (exp);
11905 else
11906 return size_in_bytes (TREE_TYPE (exp));
11909 /* Return an rtx for the size in bytes of the value of EXP. */
11912 expr_size (tree exp)
11914 tree size;
11916 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11917 size = TREE_OPERAND (exp, 1);
11918 else
11920 size = tree_expr_size (exp);
11921 gcc_assert (size);
11922 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11925 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11928 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11929 if the size can vary or is larger than an integer. */
11931 static HOST_WIDE_INT
11932 int_expr_size (tree exp)
11934 tree size;
11936 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11937 size = TREE_OPERAND (exp, 1);
11938 else
11940 size = tree_expr_size (exp);
11941 gcc_assert (size);
11944 if (size == 0 || !tree_fits_shwi_p (size))
11945 return -1;
11947 return tree_to_shwi (size);