gcc/expr.c (official-gcc.git)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "tm_p.h"
30 #include "ssa.h"
31 #include "expmed.h"
32 #include "optabs.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "cgraph.h"
37 #include "diagnostic.h"
38 #include "alias.h"
39 #include "fold-const.h"
40 #include "stor-layout.h"
41 #include "attribs.h"
42 #include "varasm.h"
43 #include "except.h"
44 #include "insn-attr.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "stmt.h"
49 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
50 #include "expr.h"
51 #include "optabs-tree.h"
52 #include "libfuncs.h"
53 #include "reload.h"
54 #include "langhooks.h"
55 #include "common/common-target.h"
56 #include "tree-ssa-live.h"
57 #include "tree-outof-ssa.h"
58 #include "tree-ssa-address.h"
59 #include "builtins.h"
60 #include "tree-chkp.h"
61 #include "rtl-chkp.h"
62 #include "ccmp.h"
65 /* If this is nonzero, we do not bother generating VOLATILE
66 around volatile memory references, and we are willing to
67 output indirect addresses. If cse is to follow, we reject
68 indirect addresses so a useful potential cse is generated;
69 if it is used only once, instruction combination will produce
70 the same indirect address eventually. */
71 int cse_not_expected;
73 /* This structure is used by move_by_pieces to describe the move to
74 be performed. */
75 struct move_by_pieces_d
77 rtx to;
78 rtx to_addr;
79 int autinc_to;
80 int explicit_inc_to;
81 rtx from;
82 rtx from_addr;
83 int autinc_from;
84 int explicit_inc_from;
85 unsigned HOST_WIDE_INT len;
86 HOST_WIDE_INT offset;
87 int reverse;
90 /* This structure is used by store_by_pieces to describe the clear to
91 be performed. */
93 struct store_by_pieces_d
95 rtx to;
96 rtx to_addr;
97 int autinc_to;
98 int explicit_inc_to;
99 unsigned HOST_WIDE_INT len;
100 HOST_WIDE_INT offset;
101 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
102 void *constfundata;
103 int reverse;
106 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
107 struct move_by_pieces_d *);
108 static bool block_move_libcall_safe_for_call_parm (void);
109 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
110 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
111 unsigned HOST_WIDE_INT);
112 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
113 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
114 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
115 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
116 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
117 struct store_by_pieces_d *);
118 static rtx_insn *compress_float_constant (rtx, rtx);
119 static rtx get_subtarget (rtx);
120 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
121 HOST_WIDE_INT, machine_mode,
122 tree, int, alias_set_type, bool);
123 static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool);
124 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
125 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
126 machine_mode, tree, alias_set_type, bool, bool);
128 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
130 static int is_aligning_offset (const_tree, const_tree);
131 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
132 static rtx do_store_flag (sepops, rtx, machine_mode);
133 #ifdef PUSH_ROUNDING
134 static void emit_single_push_insn (machine_mode, rtx, tree);
135 #endif
136 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
137 static rtx const_vector_from_tree (tree);
138 static rtx const_scalar_mask_from_tree (tree);
139 static tree tree_expr_size (const_tree);
140 static HOST_WIDE_INT int_expr_size (tree);
143 /* This is run to set up which modes can be used
144 directly in memory and to initialize the block move optab. It is run
145 at the beginning of compilation and when the target is reinitialized. */
147 void
148 init_expr_target (void)
150 rtx insn, pat;
151 machine_mode mode;
152 int num_clobbers;
153 rtx mem, mem1;
154 rtx reg;
156 /* Try indexing by frame ptr and try by stack ptr.
157 It is known that on the Convex the stack ptr isn't a valid index.
158 With luck, one or the other is valid on any machine. */
159 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
160 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
162 /* A scratch register we can modify in-place below to avoid
163 useless RTL allocations. */
164 reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
166 insn = rtx_alloc (INSN);
167 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
168 PATTERN (insn) = pat;
170 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
171 mode = (machine_mode) ((int) mode + 1))
173 int regno;
175 direct_load[(int) mode] = direct_store[(int) mode] = 0;
176 PUT_MODE (mem, mode);
177 PUT_MODE (mem1, mode);
179 /* See if there is some register that can be used in this mode and
180 directly loaded or stored from memory. */
182 if (mode != VOIDmode && mode != BLKmode)
183 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
184 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
185 regno++)
187 if (! HARD_REGNO_MODE_OK (regno, mode))
188 continue;
190 set_mode_and_regno (reg, mode, regno);
192 SET_SRC (pat) = mem;
193 SET_DEST (pat) = reg;
194 if (recog (pat, insn, &num_clobbers) >= 0)
195 direct_load[(int) mode] = 1;
197 SET_SRC (pat) = mem1;
198 SET_DEST (pat) = reg;
199 if (recog (pat, insn, &num_clobbers) >= 0)
200 direct_load[(int) mode] = 1;
202 SET_SRC (pat) = reg;
203 SET_DEST (pat) = mem;
204 if (recog (pat, insn, &num_clobbers) >= 0)
205 direct_store[(int) mode] = 1;
207 SET_SRC (pat) = reg;
208 SET_DEST (pat) = mem1;
209 if (recog (pat, insn, &num_clobbers) >= 0)
210 direct_store[(int) mode] = 1;
214 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
216 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
217 mode = GET_MODE_WIDER_MODE (mode))
219 machine_mode srcmode;
220 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
221 srcmode = GET_MODE_WIDER_MODE (srcmode))
223 enum insn_code ic;
225 ic = can_extend_p (mode, srcmode, 0);
226 if (ic == CODE_FOR_nothing)
227 continue;
229 PUT_MODE (mem, srcmode);
231 if (insn_operand_matches (ic, 1, mem))
232 float_extend_from_mem[mode][srcmode] = true;
237 /* This is run at the start of compiling a function. */
239 void
240 init_expr (void)
242 memset (&crtl->expr, 0, sizeof (crtl->expr));
245 /* Copy data from FROM to TO, where the machine modes are not the same.
246 Both modes may be integer, or both may be floating, or both may be
247 fixed-point.
248 UNSIGNEDP should be nonzero if FROM is an unsigned type.
249 This causes zero-extension instead of sign-extension. */
251 void
252 convert_move (rtx to, rtx from, int unsignedp)
254 machine_mode to_mode = GET_MODE (to);
255 machine_mode from_mode = GET_MODE (from);
256 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
257 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
258 enum insn_code code;
259 rtx libcall;
261 /* rtx code for making an equivalent value. */
262 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
263 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
266 gcc_assert (to_real == from_real);
267 gcc_assert (to_mode != BLKmode);
268 gcc_assert (from_mode != BLKmode);
270 /* If the source and destination are already the same, then there's
271 nothing to do. */
272 if (to == from)
273 return;
275 /* If FROM is a SUBREG that indicates that we have already done at least
276 the required extension, strip it. We don't handle such SUBREGs as
277 TO here. */
279 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
280 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
281 >= GET_MODE_PRECISION (to_mode))
282 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
283 from = gen_lowpart (to_mode, from), from_mode = to_mode;
285 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
287 if (to_mode == from_mode
288 || (from_mode == VOIDmode && CONSTANT_P (from)))
290 emit_move_insn (to, from);
291 return;
294 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
296 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
298 if (VECTOR_MODE_P (to_mode))
299 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
300 else
301 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
303 emit_move_insn (to, from);
304 return;
307 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
309 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
310 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
311 return;
314 if (to_real)
316 rtx value;
317 rtx_insn *insns;
318 convert_optab tab;
320 gcc_assert ((GET_MODE_PRECISION (from_mode)
321 != GET_MODE_PRECISION (to_mode))
322 || (DECIMAL_FLOAT_MODE_P (from_mode)
323 != DECIMAL_FLOAT_MODE_P (to_mode)));
325 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
326 /* Conversion between decimal float and binary float, same size. */
327 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
328 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
329 tab = sext_optab;
330 else
331 tab = trunc_optab;
333 /* Try converting directly if the insn is supported. */
335 code = convert_optab_handler (tab, to_mode, from_mode);
336 if (code != CODE_FOR_nothing)
338 emit_unop_insn (code, to, from,
339 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
340 return;
343 /* Otherwise use a libcall. */
344 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
346 /* Is this conversion implemented yet? */
347 gcc_assert (libcall);
349 start_sequence ();
350 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
351 1, from, from_mode);
352 insns = get_insns ();
353 end_sequence ();
354 emit_libcall_block (insns, to, value,
355 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
356 from)
357 : gen_rtx_FLOAT_EXTEND (to_mode, from));
358 return;
361 /* Handle pointer conversion. */ /* SPEE 900220. */
362 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
364 convert_optab ctab;
366 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
367 ctab = trunc_optab;
368 else if (unsignedp)
369 ctab = zext_optab;
370 else
371 ctab = sext_optab;
373 if (convert_optab_handler (ctab, to_mode, from_mode)
374 != CODE_FOR_nothing)
376 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
377 to, from, UNKNOWN);
378 return;
382 /* Targets are expected to provide conversion insns between PxImode and
383 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
384 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
386 machine_mode full_mode
387 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
389 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
390 != CODE_FOR_nothing);
392 if (full_mode != from_mode)
393 from = convert_to_mode (full_mode, from, unsignedp);
394 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
395 to, from, UNKNOWN);
396 return;
398 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
400 rtx new_from;
401 machine_mode full_mode
402 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
403 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
404 enum insn_code icode;
406 icode = convert_optab_handler (ctab, full_mode, from_mode);
407 gcc_assert (icode != CODE_FOR_nothing);
409 if (to_mode == full_mode)
411 emit_unop_insn (icode, to, from, UNKNOWN);
412 return;
415 new_from = gen_reg_rtx (full_mode);
416 emit_unop_insn (icode, new_from, from, UNKNOWN);
418 /* else proceed to integer conversions below. */
419 from_mode = full_mode;
420 from = new_from;
423 /* Make sure both are fixed-point modes or both are not. */
424 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
425 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
426 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
428 /* If we widen from_mode to to_mode and they are in the same class,
429 we won't saturate the result.
430 Otherwise, always saturate the result to play safe. */
431 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
432 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
433 expand_fixed_convert (to, from, 0, 0);
434 else
435 expand_fixed_convert (to, from, 0, 1);
436 return;
439 /* Now both modes are integers. */
441 /* Handle expanding beyond a word. */
442 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
443 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
445 rtx_insn *insns;
446 rtx lowpart;
447 rtx fill_value;
448 rtx lowfrom;
449 int i;
450 machine_mode lowpart_mode;
451 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
453 /* Try converting directly if the insn is supported. */
454 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
455 != CODE_FOR_nothing)
457 /* If FROM is a SUBREG, put it into a register. Do this
458 so that we always generate the same set of insns for
459 better cse'ing; if an intermediate assignment occurred,
460 we won't be doing the operation directly on the SUBREG. */
461 if (optimize > 0 && GET_CODE (from) == SUBREG)
462 from = force_reg (from_mode, from);
463 emit_unop_insn (code, to, from, equiv_code);
464 return;
466 /* Next, try converting via full word. */
467 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
468 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
469 != CODE_FOR_nothing))
471 rtx word_to = gen_reg_rtx (word_mode);
472 if (REG_P (to))
474 if (reg_overlap_mentioned_p (to, from))
475 from = force_reg (from_mode, from);
476 emit_clobber (to);
478 convert_move (word_to, from, unsignedp);
479 emit_unop_insn (code, to, word_to, equiv_code);
480 return;
483 /* No special multiword conversion insn; do it by hand. */
484 start_sequence ();
486 /* Since we will turn this into a no conflict block, we must ensure
487 the source does not overlap the target so force it into an isolated
488 register when maybe so. Likewise for any MEM input, since the
489 conversion sequence might require several references to it and we
490 must ensure we're getting the same value every time. */
492 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
493 from = force_reg (from_mode, from);
495 /* Get a copy of FROM widened to a word, if necessary. */
496 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
497 lowpart_mode = word_mode;
498 else
499 lowpart_mode = from_mode;
501 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
503 lowpart = gen_lowpart (lowpart_mode, to);
504 emit_move_insn (lowpart, lowfrom);
506 /* Compute the value to put in each remaining word. */
507 if (unsignedp)
508 fill_value = const0_rtx;
509 else
510 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
511 LT, lowfrom, const0_rtx,
512 lowpart_mode, 0, -1);
514 /* Fill the remaining words. */
515 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
517 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
518 rtx subword = operand_subword (to, index, 1, to_mode);
520 gcc_assert (subword);
522 if (fill_value != subword)
523 emit_move_insn (subword, fill_value);
526 insns = get_insns ();
527 end_sequence ();
529 emit_insn (insns);
530 return;
533 /* Truncating multi-word to a word or less. */
534 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
535 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
537 if (!((MEM_P (from)
538 && ! MEM_VOLATILE_P (from)
539 && direct_load[(int) to_mode]
540 && ! mode_dependent_address_p (XEXP (from, 0),
541 MEM_ADDR_SPACE (from)))
542 || REG_P (from)
543 || GET_CODE (from) == SUBREG))
544 from = force_reg (from_mode, from);
545 convert_move (to, gen_lowpart (word_mode, from), 0);
546 return;
549 /* Now follow all the conversions between integers
550 no more than a word long. */
552 /* For truncation, usually we can just refer to FROM in a narrower mode. */
553 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
554 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
556 if (!((MEM_P (from)
557 && ! MEM_VOLATILE_P (from)
558 && direct_load[(int) to_mode]
559 && ! mode_dependent_address_p (XEXP (from, 0),
560 MEM_ADDR_SPACE (from)))
561 || REG_P (from)
562 || GET_CODE (from) == SUBREG))
563 from = force_reg (from_mode, from);
564 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
565 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
566 from = copy_to_reg (from);
567 emit_move_insn (to, gen_lowpart (to_mode, from));
568 return;
571 /* Handle extension. */
572 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
574 /* Convert directly if that works. */
575 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
576 != CODE_FOR_nothing)
578 emit_unop_insn (code, to, from, equiv_code);
579 return;
581 else
583 machine_mode intermediate;
584 rtx tmp;
585 int shift_amount;
587 /* Search for a mode to convert via. */
588 for (intermediate = from_mode; intermediate != VOIDmode;
589 intermediate = GET_MODE_WIDER_MODE (intermediate))
590 if (((can_extend_p (to_mode, intermediate, unsignedp)
591 != CODE_FOR_nothing)
592 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
593 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
594 && (can_extend_p (intermediate, from_mode, unsignedp)
595 != CODE_FOR_nothing))
597 convert_move (to, convert_to_mode (intermediate, from,
598 unsignedp), unsignedp);
599 return;
602 /* No suitable intermediate mode.
603 Generate what we need with shifts. */
604 shift_amount = (GET_MODE_PRECISION (to_mode)
605 - GET_MODE_PRECISION (from_mode));
606 from = gen_lowpart (to_mode, force_reg (from_mode, from));
607 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
608 to, unsignedp);
609 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
610 to, unsignedp);
611 if (tmp != to)
612 emit_move_insn (to, tmp);
613 return;
617 /* Support special truncate insns for certain modes. */
618 if (convert_optab_handler (trunc_optab, to_mode,
619 from_mode) != CODE_FOR_nothing)
621 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
622 to, from, UNKNOWN);
623 return;
626 /* Handle truncation of volatile memrefs, and so on;
627 the things that couldn't be truncated directly,
628 and for which there was no special instruction.
630 ??? Code above formerly short-circuited this, for most integer
631 mode pairs, with a force_reg in from_mode followed by a recursive
632 call to this routine. Appears always to have been wrong. */
633 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
635 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
636 emit_move_insn (to, temp);
637 return;
640 /* Mode combination is not recognized. */
641 gcc_unreachable ();
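/* Illustrative usage sketch (editorial, not part of the original
   source): widening a SImode value into a fresh DImode pseudo with
   zero extension; SRC and DST are hypothetical pseudos.

       rtx src = gen_reg_rtx (SImode);
       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src, 1);

   UNSIGNEDP = 1 requests zero extension and UNSIGNEDP = 0 sign
   extension; when the two modes already match, the call degenerates
   to a plain emit_move_insn.  */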
644 /* Return an rtx for a value that would result
645 from converting X to mode MODE.
646 Both X and MODE may be floating, or both integer.
647 UNSIGNEDP is nonzero if X is an unsigned value.
648 This can be done by referring to a part of X in place
649 or by copying to a new temporary with conversion. */
652 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
654 return convert_modes (mode, VOIDmode, x, unsignedp);
657 /* Return an rtx for a value that would result
658 from converting X from mode OLDMODE to mode MODE.
659 Both modes may be floating, or both integer.
660 UNSIGNEDP is nonzero if X is an unsigned value.
662 This can be done by referring to a part of X in place
663 or by copying to a new temporary with conversion.
665 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
668 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
670 rtx temp;
672 /* If FROM is a SUBREG that indicates that we have already done at least
673 the required extension, strip it. */
675 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
676 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
677 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
678 x = gen_lowpart (mode, SUBREG_REG (x));
680 if (GET_MODE (x) != VOIDmode)
681 oldmode = GET_MODE (x);
683 if (mode == oldmode)
684 return x;
686 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
688 /* If the caller did not tell us the old mode, then there is not
689 much to do with respect to canonicalization. We have to
690 assume that all the bits are significant. */
691 if (GET_MODE_CLASS (oldmode) != MODE_INT)
692 oldmode = MAX_MODE_INT;
693 wide_int w = wide_int::from (std::make_pair (x, oldmode),
694 GET_MODE_PRECISION (mode),
695 unsignedp ? UNSIGNED : SIGNED);
696 return immed_wide_int_const (w, mode);
699 /* We can do this with a gen_lowpart if both desired and current modes
700 are integer, and this is either a constant integer, a register, or a
701 non-volatile MEM. */
702 if (GET_MODE_CLASS (mode) == MODE_INT
703 && GET_MODE_CLASS (oldmode) == MODE_INT
704 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
705 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
706 || (REG_P (x)
707 && (!HARD_REGISTER_P (x)
708 || HARD_REGNO_MODE_OK (REGNO (x), mode))
709 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
711 return gen_lowpart (mode, x);
713 /* Converting an integer constant into MODE is always equivalent to a
714 subreg operation. */
715 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
717 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
718 return simplify_gen_subreg (mode, x, oldmode, 0);
721 temp = gen_reg_rtx (mode);
722 convert_move (temp, x, unsignedp);
723 return temp;
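/* Illustrative usage sketch (editorial, not part of the original
   source): convert_to_mode is simply convert_modes with OLDMODE ==
   VOIDmode, so a caller that does not know X's current mode writes,
   with hypothetical operands,

       rtx wide = convert_to_mode (DImode, x, 1);

   while a caller that does know it can give the extra hint:

       rtx wide = convert_modes (DImode, SImode, x, 1);

   Either way the result is X reused in place (e.g. via gen_lowpart or
   immed_wide_int_const) when that is safe, or a fresh pseudo filled in
   by convert_move.  */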
726 /* Return the largest alignment we can use for doing a move (or store)
727 of MAX_PIECES. ALIGN is the largest alignment we could use. */
729 static unsigned int
730 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
732 machine_mode tmode;
734 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
735 if (align >= GET_MODE_ALIGNMENT (tmode))
736 align = GET_MODE_ALIGNMENT (tmode);
737 else
739 machine_mode tmode, xmode;
741 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
742 tmode != VOIDmode;
743 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
744 if (GET_MODE_SIZE (tmode) > max_pieces
745 || SLOW_UNALIGNED_ACCESS (tmode, align))
746 break;
748 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
751 return align;
754 /* Return the widest integer mode no wider than SIZE. If no such mode
755 can be found, return VOIDmode. */
757 static machine_mode
758 widest_int_mode_for_size (unsigned int size)
760 machine_mode tmode, mode = VOIDmode;
762 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
763 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
764 if (GET_MODE_SIZE (tmode) < size)
765 mode = tmode;
767 return mode;
770 /* Determine whether the LEN bytes can be moved by using several move
771 instructions. Return nonzero if a call to move_by_pieces should
772 succeed. */
775 can_move_by_pieces (unsigned HOST_WIDE_INT len,
776 unsigned int align)
778 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
779 optimize_insn_for_speed_p ());
782 /* Generate several move instructions to copy LEN bytes from block FROM to
783 block TO. (These are MEM rtx's with BLKmode).
785 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
786 used to push FROM to the stack.
788 ALIGN is maximum stack alignment we can assume.
790 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
791 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
792 stpcpy. */
795 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
796 unsigned int align, int endp)
798 struct move_by_pieces_d data;
799 machine_mode to_addr_mode;
800 machine_mode from_addr_mode = get_address_mode (from);
801 rtx to_addr, from_addr = XEXP (from, 0);
802 unsigned int max_size = MOVE_MAX_PIECES + 1;
803 enum insn_code icode;
805 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
807 data.offset = 0;
808 data.from_addr = from_addr;
809 if (to)
811 to_addr_mode = get_address_mode (to);
812 to_addr = XEXP (to, 0);
813 data.to = to;
814 data.autinc_to
815 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
816 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
817 data.reverse
818 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
820 else
822 to_addr_mode = VOIDmode;
823 to_addr = NULL_RTX;
824 data.to = NULL_RTX;
825 data.autinc_to = 1;
826 if (STACK_GROWS_DOWNWARD)
827 data.reverse = 1;
828 else
829 data.reverse = 0;
831 data.to_addr = to_addr;
832 data.from = from;
833 data.autinc_from
834 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
835 || GET_CODE (from_addr) == POST_INC
836 || GET_CODE (from_addr) == POST_DEC);
838 data.explicit_inc_from = 0;
839 data.explicit_inc_to = 0;
840 if (data.reverse) data.offset = len;
841 data.len = len;
843 /* If copying requires more than two move insns,
844 copy addresses to registers (to make displacements shorter)
845 and use post-increment if available. */
846 if (!(data.autinc_from && data.autinc_to)
847 && move_by_pieces_ninsns (len, align, max_size) > 2)
849 /* Find the mode of the largest move...
850 MODE might not be used depending on the definitions of the
851 USE_* macros below. */
852 machine_mode mode ATTRIBUTE_UNUSED
853 = widest_int_mode_for_size (max_size);
855 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
857 data.from_addr = copy_to_mode_reg (from_addr_mode,
858 plus_constant (from_addr_mode,
859 from_addr, len));
860 data.autinc_from = 1;
861 data.explicit_inc_from = -1;
863 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
865 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
866 data.autinc_from = 1;
867 data.explicit_inc_from = 1;
869 if (!data.autinc_from && CONSTANT_P (from_addr))
870 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
871 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
873 data.to_addr = copy_to_mode_reg (to_addr_mode,
874 plus_constant (to_addr_mode,
875 to_addr, len));
876 data.autinc_to = 1;
877 data.explicit_inc_to = -1;
879 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
881 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
882 data.autinc_to = 1;
883 data.explicit_inc_to = 1;
885 if (!data.autinc_to && CONSTANT_P (to_addr))
886 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
889 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
891 /* First move what we can in the largest integer mode, then go to
892 successively smaller modes. */
894 while (max_size > 1 && data.len > 0)
896 machine_mode mode = widest_int_mode_for_size (max_size);
898 if (mode == VOIDmode)
899 break;
901 icode = optab_handler (mov_optab, mode);
902 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
903 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
905 max_size = GET_MODE_SIZE (mode);
908 /* The code above should have handled everything. */
909 gcc_assert (!data.len);
911 if (endp)
913 rtx to1;
915 gcc_assert (!data.reverse);
916 if (data.autinc_to)
918 if (endp == 2)
920 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
921 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
922 else
923 data.to_addr = copy_to_mode_reg (to_addr_mode,
924 plus_constant (to_addr_mode,
925 data.to_addr,
926 -1));
928 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
929 data.offset);
931 else
933 if (endp == 2)
934 --data.offset;
935 to1 = adjust_address (data.to, QImode, data.offset);
937 return to1;
939 else
940 return data.to;
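/* Illustrative note (editorial, not part of the original source): the
   usual way into move_by_pieces is emit_block_move_hints below, which
   for a constant length does essentially

       if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
         move_by_pieces (x, y, INTVAL (size), align, 0);

   with X and Y being BLKmode MEMs and ENDP = 0 meaning "just return the
   destination block" rather than a mempcpy/stpcpy-style end pointer.  */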
943 /* Return number of insns required to move L bytes by pieces.
944 ALIGN (in bits) is maximum alignment we can assume. */
946 unsigned HOST_WIDE_INT
947 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
948 unsigned int max_size)
950 unsigned HOST_WIDE_INT n_insns = 0;
952 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
954 while (max_size > 1 && l > 0)
956 machine_mode mode;
957 enum insn_code icode;
959 mode = widest_int_mode_for_size (max_size);
961 if (mode == VOIDmode)
962 break;
964 icode = optab_handler (mov_optab, mode);
965 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
966 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
968 max_size = GET_MODE_SIZE (mode);
971 gcc_assert (!l);
972 return n_insns;
975 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
976 with move instructions for mode MODE. GENFUN is the gen_... function
977 to make a move insn for that mode. DATA has all the other info. */
979 static void
980 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
981 struct move_by_pieces_d *data)
983 unsigned int size = GET_MODE_SIZE (mode);
984 rtx to1 = NULL_RTX, from1;
986 while (data->len >= size)
988 if (data->reverse)
989 data->offset -= size;
991 if (data->to)
993 if (data->autinc_to)
994 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
995 data->offset);
996 else
997 to1 = adjust_address (data->to, mode, data->offset);
1000 if (data->autinc_from)
1001 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1002 data->offset);
1003 else
1004 from1 = adjust_address (data->from, mode, data->offset);
1006 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1007 emit_insn (gen_add2_insn (data->to_addr,
1008 gen_int_mode (-(HOST_WIDE_INT) size,
1009 GET_MODE (data->to_addr))));
1010 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1011 emit_insn (gen_add2_insn (data->from_addr,
1012 gen_int_mode (-(HOST_WIDE_INT) size,
1013 GET_MODE (data->from_addr))));
1015 if (data->to)
1016 emit_insn ((*genfun) (to1, from1));
1017 else
1019 #ifdef PUSH_ROUNDING
1020 emit_single_push_insn (mode, from1, NULL);
1021 #else
1022 gcc_unreachable ();
1023 #endif
1026 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1027 emit_insn (gen_add2_insn (data->to_addr,
1028 gen_int_mode (size,
1029 GET_MODE (data->to_addr))));
1030 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1031 emit_insn (gen_add2_insn (data->from_addr,
1032 gen_int_mode (size,
1033 GET_MODE (data->from_addr))));
1035 if (! data->reverse)
1036 data->offset += size;
1038 data->len -= size;
1042 /* Emit code to move a block Y to a block X. This may be done with
1043 string-move instructions, with multiple scalar move instructions,
1044 or with a library call.
1046 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1047 SIZE is an rtx that says how long they are.
1048 ALIGN is the maximum alignment we can assume they have.
1049 METHOD describes what kind of copy this is, and what mechanisms may be used.
1050 MIN_SIZE is the minimal size of the block to move.
1051 MAX_SIZE is the maximal size of the block to move; if it cannot be represented
1052 in unsigned HOST_WIDE_INT, then it is the mask of all ones.
1054 Return the address of the new block, if memcpy is called and returns it,
1055 0 otherwise. */
1058 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1059 unsigned int expected_align, HOST_WIDE_INT expected_size,
1060 unsigned HOST_WIDE_INT min_size,
1061 unsigned HOST_WIDE_INT max_size,
1062 unsigned HOST_WIDE_INT probable_max_size)
1064 bool may_use_call;
1065 rtx retval = 0;
1066 unsigned int align;
1068 gcc_assert (size);
1069 if (CONST_INT_P (size)
1070 && INTVAL (size) == 0)
1071 return 0;
1073 switch (method)
1075 case BLOCK_OP_NORMAL:
1076 case BLOCK_OP_TAILCALL:
1077 may_use_call = true;
1078 break;
1080 case BLOCK_OP_CALL_PARM:
1081 may_use_call = block_move_libcall_safe_for_call_parm ();
1083 /* Make inhibit_defer_pop nonzero around the library call
1084 to force it to pop the arguments right away. */
1085 NO_DEFER_POP;
1086 break;
1088 case BLOCK_OP_NO_LIBCALL:
1089 may_use_call = false;
1090 break;
1092 default:
1093 gcc_unreachable ();
1096 gcc_assert (MEM_P (x) && MEM_P (y));
1097 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1098 gcc_assert (align >= BITS_PER_UNIT);
1100 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1101 block copy is more efficient for other large modes, e.g. DCmode. */
1102 x = adjust_address (x, BLKmode, 0);
1103 y = adjust_address (y, BLKmode, 0);
1105 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1106 can be incorrect is coming from __builtin_memcpy. */
1107 if (CONST_INT_P (size))
1109 x = shallow_copy_rtx (x);
1110 y = shallow_copy_rtx (y);
1111 set_mem_size (x, INTVAL (size));
1112 set_mem_size (y, INTVAL (size));
1115 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1116 move_by_pieces (x, y, INTVAL (size), align, 0);
1117 else if (emit_block_move_via_movmem (x, y, size, align,
1118 expected_align, expected_size,
1119 min_size, max_size, probable_max_size))
1121 else if (may_use_call
1122 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1123 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1125 /* Since x and y are passed to a libcall, mark the corresponding
1126 tree EXPR as addressable. */
1127 tree y_expr = MEM_EXPR (y);
1128 tree x_expr = MEM_EXPR (x);
1129 if (y_expr)
1130 mark_addressable (y_expr);
1131 if (x_expr)
1132 mark_addressable (x_expr);
1133 retval = emit_block_copy_via_libcall (x, y, size,
1134 method == BLOCK_OP_TAILCALL);
1137 else
1138 emit_block_move_via_loop (x, y, size, align);
1140 if (method == BLOCK_OP_CALL_PARM)
1141 OK_DEFER_POP;
1143 return retval;
1147 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1149 unsigned HOST_WIDE_INT max, min = 0;
1150 if (GET_CODE (size) == CONST_INT)
1151 min = max = UINTVAL (size);
1152 else
1153 max = GET_MODE_MASK (GET_MODE (size));
1154 return emit_block_move_hints (x, y, size, method, 0, -1,
1155 min, max, max);
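/* Illustrative usage sketch (editorial, not part of the original
   source): minimal use of the public wrapper above, copying 32 bytes
   between two hypothetical BLKmode MEMs DST and SRC:

       emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);

   BLOCK_OP_CALL_PARM is used instead when the copy feeds an outgoing
   call argument (so a libcall must not clobber already-pushed
   arguments), and BLOCK_OP_NO_LIBCALL forbids the memcpy fallback
   entirely; see the METHOD switch in emit_block_move_hints.  */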
1158 /* A subroutine of emit_block_move. Returns true if calling the
1159 block move libcall will not clobber any parameters which may have
1160 already been placed on the stack. */
1162 static bool
1163 block_move_libcall_safe_for_call_parm (void)
1165 #if defined (REG_PARM_STACK_SPACE)
1166 tree fn;
1167 #endif
1169 /* If arguments are pushed on the stack, then they're safe. */
1170 if (PUSH_ARGS)
1171 return true;
1173 /* If registers go on the stack anyway, any argument is sure to clobber
1174 an outgoing argument. */
1175 #if defined (REG_PARM_STACK_SPACE)
1176 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1177 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1178 depend on its argument. */
1179 (void) fn;
1180 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1181 && REG_PARM_STACK_SPACE (fn) != 0)
1182 return false;
1183 #endif
1185 /* If any argument goes in memory, then it might clobber an outgoing
1186 argument. */
1188 CUMULATIVE_ARGS args_so_far_v;
1189 cumulative_args_t args_so_far;
1190 tree fn, arg;
1192 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1193 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1194 args_so_far = pack_cumulative_args (&args_so_far_v);
1196 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1197 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1199 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1200 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1201 NULL_TREE, true);
1202 if (!tmp || !REG_P (tmp))
1203 return false;
1204 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1205 return false;
1206 targetm.calls.function_arg_advance (args_so_far, mode,
1207 NULL_TREE, true);
1210 return true;
1213 /* A subroutine of emit_block_move. Expand a movmem pattern;
1214 return true if successful. */
1216 static bool
1217 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1218 unsigned int expected_align, HOST_WIDE_INT expected_size,
1219 unsigned HOST_WIDE_INT min_size,
1220 unsigned HOST_WIDE_INT max_size,
1221 unsigned HOST_WIDE_INT probable_max_size)
1223 int save_volatile_ok = volatile_ok;
1224 machine_mode mode;
1226 if (expected_align < align)
1227 expected_align = align;
1228 if (expected_size != -1)
1230 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1231 expected_size = probable_max_size;
1232 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1233 expected_size = min_size;
1236 /* Since this is a move insn, we don't care about volatility. */
1237 volatile_ok = 1;
1239 /* Try the most limited insn first, because there's no point
1240 including more than one in the machine description unless
1241 the more limited one has some advantage. */
1243 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1244 mode = GET_MODE_WIDER_MODE (mode))
1246 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1248 if (code != CODE_FOR_nothing
1249 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1250 here because if SIZE is less than the mode mask, as it is
1251 returned by the macro, it will definitely be less than the
1252 actual mode mask. Since SIZE is within the Pmode address
1253 space, we limit MODE to Pmode. */
1254 && ((CONST_INT_P (size)
1255 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1256 <= (GET_MODE_MASK (mode) >> 1)))
1257 || max_size <= (GET_MODE_MASK (mode) >> 1)
1258 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1260 struct expand_operand ops[9];
1261 unsigned int nops;
1263 /* ??? When called via emit_block_move_for_call, it'd be
1264 nice if there were some way to inform the backend, so
1265 that it doesn't fail the expansion because it thinks
1266 emitting the libcall would be more efficient. */
1267 nops = insn_data[(int) code].n_generator_args;
1268 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1270 create_fixed_operand (&ops[0], x);
1271 create_fixed_operand (&ops[1], y);
1272 /* The check above guarantees that this size conversion is valid. */
1273 create_convert_operand_to (&ops[2], size, mode, true);
1274 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1275 if (nops >= 6)
1277 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1278 create_integer_operand (&ops[5], expected_size);
1280 if (nops >= 8)
1282 create_integer_operand (&ops[6], min_size);
1283 /* If we can not represent the maximal size,
1284 make parameter NULL. */
1285 if ((HOST_WIDE_INT) max_size != -1)
1286 create_integer_operand (&ops[7], max_size);
1287 else
1288 create_fixed_operand (&ops[7], NULL);
1290 if (nops == 9)
1292 /* If we can not represent the maximal size,
1293 make parameter NULL. */
1294 if ((HOST_WIDE_INT) probable_max_size != -1)
1295 create_integer_operand (&ops[8], probable_max_size);
1296 else
1297 create_fixed_operand (&ops[8], NULL);
1299 if (maybe_expand_insn (code, nops, ops))
1301 volatile_ok = save_volatile_ok;
1302 return true;
1307 volatile_ok = save_volatile_ok;
1308 return false;
1311 /* A subroutine of emit_block_move. Copy the data via an explicit
1312 loop. This is used only when libcalls are forbidden. */
1313 /* ??? It'd be nice to copy in hunks larger than QImode. */
1315 static void
1316 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1317 unsigned int align ATTRIBUTE_UNUSED)
1319 rtx_code_label *cmp_label, *top_label;
1320 rtx iter, x_addr, y_addr, tmp;
1321 machine_mode x_addr_mode = get_address_mode (x);
1322 machine_mode y_addr_mode = get_address_mode (y);
1323 machine_mode iter_mode;
1325 iter_mode = GET_MODE (size);
1326 if (iter_mode == VOIDmode)
1327 iter_mode = word_mode;
1329 top_label = gen_label_rtx ();
1330 cmp_label = gen_label_rtx ();
1331 iter = gen_reg_rtx (iter_mode);
1333 emit_move_insn (iter, const0_rtx);
1335 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1336 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1337 do_pending_stack_adjust ();
1339 emit_jump (cmp_label);
1340 emit_label (top_label);
1342 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1343 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1345 if (x_addr_mode != y_addr_mode)
1346 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1347 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1349 x = change_address (x, QImode, x_addr);
1350 y = change_address (y, QImode, y_addr);
1352 emit_move_insn (x, y);
1354 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1355 true, OPTAB_LIB_WIDEN);
1356 if (tmp != iter)
1357 emit_move_insn (iter, tmp);
1359 emit_label (cmp_label);
1361 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1362 true, top_label, REG_BR_PROB_BASE * 90 / 100);
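/* Illustrative note (editorial, not part of the original source): the
   RTL emitted by emit_block_move_via_loop above behaves like

       for (iter = 0; iter < size; iter++)
         dst_bytes[iter] = src_bytes[iter];

   where dst_bytes/src_bytes stand for the bytes of X and Y.  It is a
   byte-at-a-time (QImode) copy: the loop is entered through the
   compare at cmp_label, and the backward branch to top_label is given
   a roughly 90% taken probability.  */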
1365 /* Expand a call to memcpy or memmove or memcmp, and return the result.
1366 TAILCALL is true if this is a tail call. */
1369 emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
1370 rtx size, bool tailcall)
1372 rtx dst_addr, src_addr;
1373 tree call_expr, dst_tree, src_tree, size_tree;
1374 machine_mode size_mode;
1376 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1377 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1378 dst_tree = make_tree (ptr_type_node, dst_addr);
1380 src_addr = copy_addr_to_reg (XEXP (src, 0));
1381 src_addr = convert_memory_address (ptr_mode, src_addr);
1382 src_tree = make_tree (ptr_type_node, src_addr);
1384 size_mode = TYPE_MODE (sizetype);
1385 size = convert_to_mode (size_mode, size, 1);
1386 size = copy_to_mode_reg (size_mode, size);
1387 size_tree = make_tree (sizetype, size);
1389 /* It is incorrect to use the libcall calling conventions for calls to
1390 memcpy/memmove/memcmp because they can be provided by the user. */
1391 tree fn = builtin_decl_implicit (fncode);
1392 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1393 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1395 return expand_call (call_expr, NULL_RTX, false);
1398 /* Copy all or part of a value X into registers starting at REGNO.
1399 The number of registers to be filled is NREGS. */
1401 void
1402 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1404 if (nregs == 0)
1405 return;
1407 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1408 x = validize_mem (force_const_mem (mode, x));
1410 /* See if the machine can do this with a load multiple insn. */
1411 if (targetm.have_load_multiple ())
1413 rtx_insn *last = get_last_insn ();
1414 rtx first = gen_rtx_REG (word_mode, regno);
1415 if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
1416 GEN_INT (nregs)))
1418 emit_insn (pat);
1419 return;
1421 else
1422 delete_insns_since (last);
1425 for (int i = 0; i < nregs; i++)
1426 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1427 operand_subword_force (x, i, mode));
1430 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1431 The number of registers to be filled is NREGS. */
1433 void
1434 move_block_from_reg (int regno, rtx x, int nregs)
1436 if (nregs == 0)
1437 return;
1439 /* See if the machine can do this with a store multiple insn. */
1440 if (targetm.have_store_multiple ())
1442 rtx_insn *last = get_last_insn ();
1443 rtx first = gen_rtx_REG (word_mode, regno);
1444 if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
1445 GEN_INT (nregs)))
1447 emit_insn (pat);
1448 return;
1450 else
1451 delete_insns_since (last);
1454 for (int i = 0; i < nregs; i++)
1456 rtx tem = operand_subword (x, i, 1, BLKmode);
1458 gcc_assert (tem);
1460 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
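/* Illustrative usage sketch (editorial, not part of the original
   source): a typical caller-side pattern for the helper above, e.g.
   spilling an aggregate that arrived in two consecutive hard registers
   into a stack slot (REGNO and the slot are hypothetical):

       rtx slot = assign_stack_temp (BLKmode, 2 * UNITS_PER_WORD);
       move_block_from_reg (regno, slot, 2);

   Both move_block_to_reg and move_block_from_reg try the target's
   load/store-multiple patterns first and otherwise fall back to one
   word_mode emit_move_insn per register.  */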
1464 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1465 ORIG, where ORIG is a non-consecutive group of registers represented by
1466 a PARALLEL. The clone is identical to the original except in that the
1467 original set of registers is replaced by a new set of pseudo registers.
1468 The new set has the same modes as the original set. */
1471 gen_group_rtx (rtx orig)
1473 int i, length;
1474 rtx *tmps;
1476 gcc_assert (GET_CODE (orig) == PARALLEL);
1478 length = XVECLEN (orig, 0);
1479 tmps = XALLOCAVEC (rtx, length);
1481 /* Skip a NULL entry in first slot. */
1482 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1484 if (i)
1485 tmps[0] = 0;
1487 for (; i < length; i++)
1489 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1490 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1492 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1495 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1498 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1499 except that values are placed in TMPS[i], and must later be moved
1500 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1502 static void
1503 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1505 rtx src;
1506 int start, i;
1507 machine_mode m = GET_MODE (orig_src);
1509 gcc_assert (GET_CODE (dst) == PARALLEL);
1511 if (m != VOIDmode
1512 && !SCALAR_INT_MODE_P (m)
1513 && !MEM_P (orig_src)
1514 && GET_CODE (orig_src) != CONCAT)
1516 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1517 if (imode == BLKmode)
1518 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1519 else
1520 src = gen_reg_rtx (imode);
1521 if (imode != BLKmode)
1522 src = gen_lowpart (GET_MODE (orig_src), src);
1523 emit_move_insn (src, orig_src);
1524 /* ...and back again. */
1525 if (imode != BLKmode)
1526 src = gen_lowpart (imode, src);
1527 emit_group_load_1 (tmps, dst, src, type, ssize);
1528 return;
1531 /* Check for a NULL entry, used to indicate that the parameter goes
1532 both on the stack and in registers. */
1533 if (XEXP (XVECEXP (dst, 0, 0), 0))
1534 start = 0;
1535 else
1536 start = 1;
1538 /* Process the pieces. */
1539 for (i = start; i < XVECLEN (dst, 0); i++)
1541 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1542 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1543 unsigned int bytelen = GET_MODE_SIZE (mode);
1544 int shift = 0;
1546 /* Handle trailing fragments that run over the size of the struct. */
1547 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1549 /* Arrange to shift the fragment to where it belongs.
1550 extract_bit_field loads to the lsb of the reg. */
1551 if (
1552 #ifdef BLOCK_REG_PADDING
1553 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1554 == (BYTES_BIG_ENDIAN ? upward : downward)
1555 #else
1556 BYTES_BIG_ENDIAN
1557 #endif
1559 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1560 bytelen = ssize - bytepos;
1561 gcc_assert (bytelen > 0);
1564 /* If we won't be loading directly from memory, protect the real source
1565 from strange tricks we might play; but make sure that the source can
1566 be loaded directly into the destination. */
1567 src = orig_src;
1568 if (!MEM_P (orig_src)
1569 && (!CONSTANT_P (orig_src)
1570 || (GET_MODE (orig_src) != mode
1571 && GET_MODE (orig_src) != VOIDmode)))
1573 if (GET_MODE (orig_src) == VOIDmode)
1574 src = gen_reg_rtx (mode);
1575 else
1576 src = gen_reg_rtx (GET_MODE (orig_src));
1578 emit_move_insn (src, orig_src);
1581 /* Optimize the access just a bit. */
1582 if (MEM_P (src)
1583 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1584 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1585 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1586 && bytelen == GET_MODE_SIZE (mode))
1588 tmps[i] = gen_reg_rtx (mode);
1589 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1591 else if (COMPLEX_MODE_P (mode)
1592 && GET_MODE (src) == mode
1593 && bytelen == GET_MODE_SIZE (mode))
1594 /* Let emit_move_complex do the bulk of the work. */
1595 tmps[i] = src;
1596 else if (GET_CODE (src) == CONCAT)
1598 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1599 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1601 if ((bytepos == 0 && bytelen == slen0)
1602 || (bytepos != 0 && bytepos + bytelen <= slen))
1604 /* The following assumes that the concatenated objects all
1605 have the same size. In this case, a simple calculation
1606 can be used to determine the object and the bit field
1607 to be extracted. */
1608 tmps[i] = XEXP (src, bytepos / slen0);
1609 if (! CONSTANT_P (tmps[i])
1610 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1611 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1612 (bytepos % slen0) * BITS_PER_UNIT,
1613 1, NULL_RTX, mode, mode, false);
1615 else
1617 rtx mem;
1619 gcc_assert (!bytepos);
1620 mem = assign_stack_temp (GET_MODE (src), slen);
1621 emit_move_insn (mem, src);
1622 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1623 0, 1, NULL_RTX, mode, mode, false);
1626 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1627 SIMD register, which is currently broken. While we get GCC
1628 to emit proper RTL for these cases, let's dump to memory. */
1629 else if (VECTOR_MODE_P (GET_MODE (dst))
1630 && REG_P (src))
1632 int slen = GET_MODE_SIZE (GET_MODE (src));
1633 rtx mem;
1635 mem = assign_stack_temp (GET_MODE (src), slen);
1636 emit_move_insn (mem, src);
1637 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1639 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1640 && XVECLEN (dst, 0) > 1)
1641 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1642 else if (CONSTANT_P (src))
1644 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1646 if (len == ssize)
1647 tmps[i] = src;
1648 else
1650 rtx first, second;
1652 /* TODO: const_wide_int can have sizes other than this... */
1653 gcc_assert (2 * len == ssize);
1654 split_double (src, &first, &second);
1655 if (i)
1656 tmps[i] = second;
1657 else
1658 tmps[i] = first;
1661 else if (REG_P (src) && GET_MODE (src) == mode)
1662 tmps[i] = src;
1663 else
1664 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1665 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1666 mode, mode, false);
1668 if (shift)
1669 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1670 shift, tmps[i], 0);
1674 /* Emit code to move a block SRC of type TYPE to a block DST,
1675 where DST is non-consecutive registers represented by a PARALLEL.
1676 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1677 if not known. */
1679 void
1680 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1682 rtx *tmps;
1683 int i;
1685 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1686 emit_group_load_1 (tmps, dst, src, type, ssize);
1688 /* Copy the extracted pieces into the proper (probable) hard regs. */
1689 for (i = 0; i < XVECLEN (dst, 0); i++)
1691 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1692 if (d == NULL)
1693 continue;
1694 emit_move_insn (d, tmps[i]);
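/* Illustrative usage sketch (editorial, not part of the original
   source): DST here is a PARALLEL of (reg, byte-offset) EXPR_LIST
   pairs such as a target's function_arg / function_value hooks build
   for an aggregate split across registers, for example (register
   numbers hypothetical)

       (parallel [(expr_list (reg:DI 0) (const_int 0))
                  (expr_list (reg:DI 1) (const_int 8))])

   and a caller loads a 16-byte aggregate SRC into those registers with

       emit_group_load (dst, src, TREE_TYPE (exp), 16);

   where EXP is the aggregate's tree expression.  */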
1698 /* Similar, but load SRC into new pseudos in a format that looks like
1699 PARALLEL. This can later be fed to emit_group_move to get things
1700 in the right place. */
1703 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1705 rtvec vec;
1706 int i;
1708 vec = rtvec_alloc (XVECLEN (parallel, 0));
1709 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1711 /* Convert the vector to look just like the original PARALLEL, except
1712 with the computed values. */
1713 for (i = 0; i < XVECLEN (parallel, 0); i++)
1715 rtx e = XVECEXP (parallel, 0, i);
1716 rtx d = XEXP (e, 0);
1718 if (d)
1720 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1721 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1723 RTVEC_ELT (vec, i) = e;
1726 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1729 /* Emit code to move a block SRC to block DST, where SRC and DST are
1730 non-consecutive groups of registers, each represented by a PARALLEL. */
1732 void
1733 emit_group_move (rtx dst, rtx src)
1735 int i;
1737 gcc_assert (GET_CODE (src) == PARALLEL
1738 && GET_CODE (dst) == PARALLEL
1739 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1741 /* Skip first entry if NULL. */
1742 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1743 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1744 XEXP (XVECEXP (src, 0, i), 0));
1747 /* Move a group of registers represented by a PARALLEL into pseudos. */
1750 emit_group_move_into_temps (rtx src)
1752 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1753 int i;
1755 for (i = 0; i < XVECLEN (src, 0); i++)
1757 rtx e = XVECEXP (src, 0, i);
1758 rtx d = XEXP (e, 0);
1760 if (d)
1761 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1762 RTVEC_ELT (vec, i) = e;
1765 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1768 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1769 where SRC is non-consecutive registers represented by a PARALLEL.
1770 SSIZE represents the total size of block ORIG_DST, or -1 if not
1771 known. */
1773 void
1774 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1776 rtx *tmps, dst;
1777 int start, finish, i;
1778 machine_mode m = GET_MODE (orig_dst);
1780 gcc_assert (GET_CODE (src) == PARALLEL);
1782 if (!SCALAR_INT_MODE_P (m)
1783 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1785 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1786 if (imode == BLKmode)
1787 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1788 else
1789 dst = gen_reg_rtx (imode);
1790 emit_group_store (dst, src, type, ssize);
1791 if (imode != BLKmode)
1792 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1793 emit_move_insn (orig_dst, dst);
1794 return;
1797 /* Check for a NULL entry, used to indicate that the parameter goes
1798 both on the stack and in registers. */
1799 if (XEXP (XVECEXP (src, 0, 0), 0))
1800 start = 0;
1801 else
1802 start = 1;
1803 finish = XVECLEN (src, 0);
1805 tmps = XALLOCAVEC (rtx, finish);
1807 /* Copy the (probable) hard regs into pseudos. */
1808 for (i = start; i < finish; i++)
1810 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1811 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1813 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1814 emit_move_insn (tmps[i], reg);
1816 else
1817 tmps[i] = reg;
1820 /* If we won't be storing directly into memory, protect the real destination
1821 from strange tricks we might play. */
1822 dst = orig_dst;
1823 if (GET_CODE (dst) == PARALLEL)
1825 rtx temp;
1827 /* We can get a PARALLEL dst if there is a conditional expression in
1828 a return statement. In that case, the dst and src are the same,
1829 so no action is necessary. */
1830 if (rtx_equal_p (dst, src))
1831 return;
1833 /* It is unclear if we can ever reach here, but we may as well handle
1834 it. Allocate a temporary, and split this into a store/load to/from
1835 the temporary. */
1836 temp = assign_stack_temp (GET_MODE (dst), ssize);
1837 emit_group_store (temp, src, type, ssize);
1838 emit_group_load (dst, temp, type, ssize);
1839 return;
1841 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1843 machine_mode outer = GET_MODE (dst);
1844 machine_mode inner;
1845 HOST_WIDE_INT bytepos;
1846 bool done = false;
1847 rtx temp;
1849 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1850 dst = gen_reg_rtx (outer);
1852 /* Make life a bit easier for combine. */
1853 /* If the first element of the vector is the low part
1854 of the destination mode, use a paradoxical subreg to
1855 initialize the destination. */
1856 if (start < finish)
1858 inner = GET_MODE (tmps[start]);
1859 bytepos = subreg_lowpart_offset (inner, outer);
1860 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1862 temp = simplify_gen_subreg (outer, tmps[start],
1863 inner, 0);
1864 if (temp)
1866 emit_move_insn (dst, temp);
1867 done = true;
1868 start++;
1873 /* If the first element wasn't the low part, try the last. */
1874 if (!done
1875 && start < finish - 1)
1877 inner = GET_MODE (tmps[finish - 1]);
1878 bytepos = subreg_lowpart_offset (inner, outer);
1879 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1881 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1882 inner, 0);
1883 if (temp)
1885 emit_move_insn (dst, temp);
1886 done = true;
1887 finish--;
1892 /* Otherwise, simply initialize the result to zero. */
1893 if (!done)
1894 emit_move_insn (dst, CONST0_RTX (outer));
1897 /* Process the pieces. */
1898 for (i = start; i < finish; i++)
1900 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1901 machine_mode mode = GET_MODE (tmps[i]);
1902 unsigned int bytelen = GET_MODE_SIZE (mode);
1903 unsigned int adj_bytelen;
1904 rtx dest = dst;
1906 /* Handle trailing fragments that run over the size of the struct. */
1907 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1908 adj_bytelen = ssize - bytepos;
1909 else
1910 adj_bytelen = bytelen;
1912 if (GET_CODE (dst) == CONCAT)
1914 if (bytepos + adj_bytelen
1915 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1916 dest = XEXP (dst, 0);
1917 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1919 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1920 dest = XEXP (dst, 1);
1922 else
1924 machine_mode dest_mode = GET_MODE (dest);
1925 machine_mode tmp_mode = GET_MODE (tmps[i]);
1927 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1929 if (GET_MODE_ALIGNMENT (dest_mode)
1930 >= GET_MODE_ALIGNMENT (tmp_mode))
1932 dest = assign_stack_temp (dest_mode,
1933 GET_MODE_SIZE (dest_mode));
1934 emit_move_insn (adjust_address (dest,
1935 tmp_mode,
1936 bytepos),
1937 tmps[i]);
1938 dst = dest;
1940 else
1942 dest = assign_stack_temp (tmp_mode,
1943 GET_MODE_SIZE (tmp_mode));
1944 emit_move_insn (dest, tmps[i]);
1945 dst = adjust_address (dest, dest_mode, bytepos);
1947 break;
1951 /* Handle trailing fragments that run over the size of the struct. */
1952 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1954 /* store_bit_field always takes its value from the lsb.
1955 Move the fragment to the lsb if it's not already there. */
1956 if (
1957 #ifdef BLOCK_REG_PADDING
1958 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1959 == (BYTES_BIG_ENDIAN ? upward : downward)
1960 #else
1961 BYTES_BIG_ENDIAN
1962 #endif
1965 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1966 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1967 shift, tmps[i], 0);
1970 /* Make sure not to write past the end of the struct. */
1971 store_bit_field (dest,
1972 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1973 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
1974 VOIDmode, tmps[i], false);
1977 /* Optimize the access just a bit. */
1978 else if (MEM_P (dest)
1979 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1980 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1981 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1982 && bytelen == GET_MODE_SIZE (mode))
1983 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1985 else
1986 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1987 0, 0, mode, tmps[i], false);
1990 /* Copy from the pseudo into the (probable) hard reg. */
1991 if (orig_dst != dst)
1992 emit_move_insn (orig_dst, dst);
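/* A minimal usage sketch for emit_group_store above, assuming a hypothetical
   ABI that hands a 16-byte aggregate back in two DImode hard registers; the
   register numbers (3 and 4), the MEM rtx DST and the tree TYPE are
   illustrative assumptions, not any real target's conventions:

     rtx par
       = gen_rtx_PARALLEL (BLKmode,
                           gen_rtvec (2,
                                      gen_rtx_EXPR_LIST (VOIDmode,
                                                         gen_rtx_REG (DImode, 3),
                                                         GEN_INT (0)),
                                      gen_rtx_EXPR_LIST (VOIDmode,
                                                         gen_rtx_REG (DImode, 4),
                                                         GEN_INT (8))));
     emit_group_store (dst, par, type, 16);

   Each EXPR_LIST pairs a source register with the byte offset it occupies in
   the destination block; a leading NULL entry would mean the value also lives
   partly on the stack, which is why the loop above may start at 1.  */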
1995 /* Return a form of X that does not use a PARALLEL. TYPE is the type
1996 of the value stored in X. */
1999 maybe_emit_group_store (rtx x, tree type)
2001 machine_mode mode = TYPE_MODE (type);
2002 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2003 if (GET_CODE (x) == PARALLEL)
2005 rtx result = gen_reg_rtx (mode);
2006 emit_group_store (result, x, type, int_size_in_bytes (type));
2007 return result;
2009 return x;
2012 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2014 This is used on targets that return BLKmode values in registers. */
2016 void
2017 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2019 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2020 rtx src = NULL, dst = NULL;
2021 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2022 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2023 machine_mode mode = GET_MODE (srcreg);
2024 machine_mode tmode = GET_MODE (target);
2025 machine_mode copy_mode;
2027 /* BLKmode registers created in the back-end shouldn't have survived. */
2028 gcc_assert (mode != BLKmode);
2030 /* If the structure doesn't take up a whole number of words, see whether
2031 SRCREG is padded on the left or on the right. If it's on the left,
2032 set PADDING_CORRECTION to the number of bits to skip.
2034 In most ABIs, the structure will be returned at the least significant
2035 end of the register, which translates to right padding on little-endian
2036 targets and left padding on big-endian targets. The opposite
2037 holds if the structure is returned at the most significant
2038 end of the register. */
2039 if (bytes % UNITS_PER_WORD != 0
2040 && (targetm.calls.return_in_msb (type)
2041 ? !BYTES_BIG_ENDIAN
2042 : BYTES_BIG_ENDIAN))
2043 padding_correction
2044 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2046 /* We can use a single move if we have an exact mode for the size. */
2047 else if (MEM_P (target)
2048 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2049 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2050 && bytes == GET_MODE_SIZE (mode))
2052 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2053 return;
2056 /* And if we additionally have the same mode for a register. */
2057 else if (REG_P (target)
2058 && GET_MODE (target) == mode
2059 && bytes == GET_MODE_SIZE (mode))
2061 emit_move_insn (target, srcreg);
2062 return;
2065 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2066 into a new pseudo which is a full word. */
2067 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2069 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2070 mode = word_mode;
2073 /* Copy the structure BITSIZE bits at a time. If the target lives in
2074 memory, take care of not reading/writing past its end by selecting
2075 a copy mode suited to BITSIZE. This should always be possible given
2076 how it is computed.
2078 If the target lives in a register, make sure not to select a copy mode
2079 larger than the mode of the register.
2081 We could probably emit more efficient code for machines which do not use
2082 strict alignment, but it doesn't seem worth the effort at the current
2083 time. */
2085 copy_mode = word_mode;
2086 if (MEM_P (target))
2088 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2089 if (mem_mode != BLKmode)
2090 copy_mode = mem_mode;
2092 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2093 copy_mode = tmode;
2095 for (bitpos = 0, xbitpos = padding_correction;
2096 bitpos < bytes * BITS_PER_UNIT;
2097 bitpos += bitsize, xbitpos += bitsize)
2099 /* We need a new source operand each time xbitpos is on a
2100 word boundary and when xbitpos == padding_correction
2101 (the first time through). */
2102 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2103 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2105 /* We need a new destination operand each time bitpos is on
2106 a word boundary. */
2107 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2108 dst = target;
2109 else if (bitpos % BITS_PER_WORD == 0)
2110 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2112 /* Use xbitpos for the source extraction (right justified) and
2113 bitpos for the destination store (left justified). */
2114 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2115 extract_bit_field (src, bitsize,
2116 xbitpos % BITS_PER_WORD, 1,
2117 NULL_RTX, copy_mode, copy_mode,
2118 false),
2119 false);
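/* A worked example of the padding correction above, under assumed target
   parameters (32-bit words, big-endian, value returned at the least
   significant end): for a 6-byte structure, bytes % UNITS_PER_WORD is 2, so

     padding_correction
       = BITS_PER_WORD - (bytes % UNITS_PER_WORD) * BITS_PER_UNIT
       = 32 - 2 * 8 = 16,

   and the copy loop starts reading SRCREG 16 bits in, skipping the pad bits
   while still filling TARGET from bit position 0.  */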
2123 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2124 register if it contains any data, otherwise return null.
2126 This is used on targets that return BLKmode values in registers. */
2129 copy_blkmode_to_reg (machine_mode mode, tree src)
2131 int i, n_regs;
2132 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2133 unsigned int bitsize;
2134 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2135 machine_mode dst_mode;
2137 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2139 x = expand_normal (src);
2141 bytes = int_size_in_bytes (TREE_TYPE (src));
2142 if (bytes == 0)
2143 return NULL_RTX;
2145 /* If the structure doesn't take up a whole number of words, see
2146 whether the register value should be padded on the left or on
2147 the right. Set PADDING_CORRECTION to the number of padding
2148 bits needed on the left side.
2150 In most ABIs, the structure will be returned at the least significant
2151 end of the register, which translates to right padding on little-endian
2152 targets and left padding on big-endian targets. The opposite
2153 holds if the structure is returned at the most significant
2154 end of the register. */
2155 if (bytes % UNITS_PER_WORD != 0
2156 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2157 ? !BYTES_BIG_ENDIAN
2158 : BYTES_BIG_ENDIAN))
2159 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2160 * BITS_PER_UNIT));
2162 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2163 dst_words = XALLOCAVEC (rtx, n_regs);
2164 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2166 /* Copy the structure BITSIZE bits at a time. */
2167 for (bitpos = 0, xbitpos = padding_correction;
2168 bitpos < bytes * BITS_PER_UNIT;
2169 bitpos += bitsize, xbitpos += bitsize)
2171 /* We need a new destination pseudo each time xbitpos is
2172 on a word boundary and when xbitpos == padding_correction
2173 (the first time through). */
2174 if (xbitpos % BITS_PER_WORD == 0
2175 || xbitpos == padding_correction)
2177 /* Generate an appropriate register. */
2178 dst_word = gen_reg_rtx (word_mode);
2179 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2181 /* Clear the destination before we move anything into it. */
2182 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2185 /* We need a new source operand each time bitpos is on a word
2186 boundary. */
2187 if (bitpos % BITS_PER_WORD == 0)
2188 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2190 /* Use bitpos for the source extraction (left justified) and
2191 xbitpos for the destination store (right justified). */
2192 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2193 0, 0, word_mode,
2194 extract_bit_field (src_word, bitsize,
2195 bitpos % BITS_PER_WORD, 1,
2196 NULL_RTX, word_mode, word_mode,
2197 false),
2198 false);
2201 if (mode == BLKmode)
2203 /* Find the smallest integer mode large enough to hold the
2204 entire structure. */
2205 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2206 mode != VOIDmode;
2207 mode = GET_MODE_WIDER_MODE (mode))
2208 /* Have we found a large enough mode? */
2209 if (GET_MODE_SIZE (mode) >= bytes)
2210 break;
2212 /* A suitable mode should have been found. */
2213 gcc_assert (mode != VOIDmode);
2216 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2217 dst_mode = word_mode;
2218 else
2219 dst_mode = mode;
2220 dst = gen_reg_rtx (dst_mode);
2222 for (i = 0; i < n_regs; i++)
2223 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2225 if (mode != dst_mode)
2226 dst = gen_lowpart (mode, dst);
2228 return dst;
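/* A sketch of how a caller might use copy_blkmode_to_reg when expanding
   "return s;" for a small BLKmode aggregate; RESULT_RTL (the function's
   return-value rtx) and RETVAL (the returned tree) are assumptions here:

     rtx val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval);
     if (val)
       emit_move_insn (result_rtl, val);

   When the aggregate has zero size, NULL_RTX comes back and no move is
   emitted.  */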
2231 /* Add a USE expression for REG to the (possibly empty) list pointed
2232 to by CALL_FUSAGE. REG must denote a hard register. */
2234 void
2235 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2237 gcc_assert (REG_P (reg));
2239 if (!HARD_REGISTER_P (reg))
2240 return;
2242 *call_fusage
2243 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2246 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2247 to by CALL_FUSAGE. REG must denote a hard register. */
2249 void
2250 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2252 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2254 *call_fusage
2255 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2258 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2259 starting at REGNO. All of these registers must be hard registers. */
2261 void
2262 use_regs (rtx *call_fusage, int regno, int nregs)
2264 int i;
2266 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2268 for (i = 0; i < nregs; i++)
2269 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2272 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2273 PARALLEL REGS. This is for calls that pass values in multiple
2274 non-contiguous locations. The Irix 6 ABI has examples of this. */
2276 void
2277 use_group_regs (rtx *call_fusage, rtx regs)
2279 int i;
2281 for (i = 0; i < XVECLEN (regs, 0); i++)
2283 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2285 /* A NULL entry means the parameter goes both on the stack and in
2286 registers. This can also be a MEM for targets that pass values
2287 partially on the stack and partially in registers. */
2288 if (reg != 0 && REG_P (reg))
2289 use_reg (call_fusage, reg);
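/* A short sketch of how the use_* helpers above are typically combined while
   expanding a call; the register choices (r0 for the return slot, r4/r5 for
   arguments) are made-up examples, not a real ABI:

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (SImode, 0));
     use_regs (&call_fusage, 4, 2);

   The resulting EXPR_LIST chain is later attached to the CALL_INSN as its
   CALL_INSN_FUNCTION_USAGE, so the hard registers feeding the call are
   recorded as used by it.  */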
2293 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2294 assignment and the code of the expression on the RHS is CODE. Return
2295 NULL otherwise. */
2297 static gimple *
2298 get_def_for_expr (tree name, enum tree_code code)
2300 gimple *def_stmt;
2302 if (TREE_CODE (name) != SSA_NAME)
2303 return NULL;
2305 def_stmt = get_gimple_for_ssa_name (name);
2306 if (!def_stmt
2307 || gimple_assign_rhs_code (def_stmt) != code)
2308 return NULL;
2310 return def_stmt;
2313 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2314 assignment and the class of the expression on the RHS is CLASS. Return
2315 NULL otherwise. */
2317 static gimple *
2318 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2320 gimple *def_stmt;
2322 if (TREE_CODE (name) != SSA_NAME)
2323 return NULL;
2325 def_stmt = get_gimple_for_ssa_name (name);
2326 if (!def_stmt
2327 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2328 return NULL;
2330 return def_stmt;
2334 /* Determine whether the LEN bytes generated by CONSTFUN can be
2335 stored to memory using several move instructions. CONSTFUNDATA is
2336 a pointer which will be passed as argument in every CONSTFUN call.
2337 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2338 a memset operation and false if it's a copy of a constant string.
2339 Return nonzero if a call to store_by_pieces should succeed. */
2342 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2343 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2344 void *constfundata, unsigned int align, bool memsetp)
2346 unsigned HOST_WIDE_INT l;
2347 unsigned int max_size;
2348 HOST_WIDE_INT offset = 0;
2349 machine_mode mode;
2350 enum insn_code icode;
2351 int reverse;
2352 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2353 rtx cst ATTRIBUTE_UNUSED;
2355 if (len == 0)
2356 return 1;
2358 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2359 memsetp
2360 ? SET_BY_PIECES
2361 : STORE_BY_PIECES,
2362 optimize_insn_for_speed_p ()))
2363 return 0;
2365 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2367 /* We would first store what we can in the largest integer mode, then go to
2368 successively smaller modes. */
2370 for (reverse = 0;
2371 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2372 reverse++)
2374 l = len;
2375 max_size = STORE_MAX_PIECES + 1;
2376 while (max_size > 1 && l > 0)
2378 mode = widest_int_mode_for_size (max_size);
2380 if (mode == VOIDmode)
2381 break;
2383 icode = optab_handler (mov_optab, mode);
2384 if (icode != CODE_FOR_nothing
2385 && align >= GET_MODE_ALIGNMENT (mode))
2387 unsigned int size = GET_MODE_SIZE (mode);
2389 while (l >= size)
2391 if (reverse)
2392 offset -= size;
2394 cst = (*constfun) (constfundata, offset, mode);
2395 if (!targetm.legitimate_constant_p (mode, cst))
2396 return 0;
2398 if (!reverse)
2399 offset += size;
2401 l -= size;
2405 max_size = GET_MODE_SIZE (mode);
2408 /* The code above should have handled everything. */
2409 gcc_assert (!l);
2412 return 1;
2415 /* Generate several move instructions to store LEN bytes generated by
2416 CONSTFUN to block TO (a MEM rtx with BLKmode). CONSTFUNDATA is a
2417 pointer which will be passed as argument in every CONSTFUN call.
2418 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2419 a memset operation and false if it's a copy of a constant string.
2420 If ENDP is 0, return TO; if ENDP is 1, return memory at the end, a la
2421 mempcpy; and if ENDP is 2, return memory at the end minus one byte, a la
2422 stpcpy. */
2425 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2426 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2427 void *constfundata, unsigned int align, bool memsetp, int endp)
2429 machine_mode to_addr_mode = get_address_mode (to);
2430 struct store_by_pieces_d data;
2432 if (len == 0)
2434 gcc_assert (endp != 2);
2435 return to;
2438 gcc_assert (targetm.use_by_pieces_infrastructure_p
2439 (len, align,
2440 memsetp
2441 ? SET_BY_PIECES
2442 : STORE_BY_PIECES,
2443 optimize_insn_for_speed_p ()));
2445 data.constfun = constfun;
2446 data.constfundata = constfundata;
2447 data.len = len;
2448 data.to = to;
2449 store_by_pieces_1 (&data, align);
2450 if (endp)
2452 rtx to1;
2454 gcc_assert (!data.reverse);
2455 if (data.autinc_to)
2457 if (endp == 2)
2459 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2460 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2461 else
2462 data.to_addr = copy_to_mode_reg (to_addr_mode,
2463 plus_constant (to_addr_mode,
2464 data.to_addr,
2465 -1));
2467 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2468 data.offset);
2470 else
2472 if (endp == 2)
2473 --data.offset;
2474 to1 = adjust_address (data.to, QImode, data.offset);
2476 return to1;
2478 else
2479 return data.to;
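/* A sketch of the CONSTFUN protocol used by can_store_by_pieces and
   store_by_pieces above.  The callback below just produces zeros, which makes
   the sequence equivalent to a small clear; DEST, LEN and ALIGN are assumed
   to be supplied by the caller:

     static rtx
     zero_constfun (void *data ATTRIBUTE_UNUSED,
                    HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                    machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     ...
     if (can_store_by_pieces (len, zero_constfun, NULL, align, true))
       store_by_pieces (dest, len, zero_constfun, NULL, align, true, 0);

   Passing 0 for ENDP returns DEST itself; 1 or 2 instead return the mempcpy-
   or stpcpy-style end address.  */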
2482 /* Generate several move instructions to clear LEN bytes of block TO (a MEM
2483 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2485 static void
2486 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2488 struct store_by_pieces_d data;
2490 if (len == 0)
2491 return;
2493 data.constfun = clear_by_pieces_1;
2494 data.constfundata = NULL;
2495 data.len = len;
2496 data.to = to;
2497 store_by_pieces_1 (&data, align);
2500 /* Callback routine for clear_by_pieces.
2501 Return const0_rtx unconditionally. */
2503 static rtx
2504 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2505 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2506 machine_mode mode ATTRIBUTE_UNUSED)
2508 return const0_rtx;
2511 /* Subroutine of clear_by_pieces and store_by_pieces.
2512 Generate several move instructions to store LEN bytes of block TO (a MEM
2513 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2515 static void
2516 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2517 unsigned int align ATTRIBUTE_UNUSED)
2519 machine_mode to_addr_mode = get_address_mode (data->to);
2520 rtx to_addr = XEXP (data->to, 0);
2521 unsigned int max_size = STORE_MAX_PIECES + 1;
2522 enum insn_code icode;
2524 data->offset = 0;
2525 data->to_addr = to_addr;
2526 data->autinc_to
2527 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2528 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2530 data->explicit_inc_to = 0;
2531 data->reverse
2532 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2533 if (data->reverse)
2534 data->offset = data->len;
2536 /* If storing requires more than two move insns,
2537 copy addresses to registers (to make displacements shorter)
2538 and use post-increment if available. */
2539 if (!data->autinc_to
2540 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2542 /* Determine the main mode we'll be using.
2543 MODE might not be used depending on the definitions of the
2544 USE_* macros below. */
2545 machine_mode mode ATTRIBUTE_UNUSED
2546 = widest_int_mode_for_size (max_size);
2548 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2550 data->to_addr = copy_to_mode_reg (to_addr_mode,
2551 plus_constant (to_addr_mode,
2552 to_addr,
2553 data->len));
2554 data->autinc_to = 1;
2555 data->explicit_inc_to = -1;
2558 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2559 && ! data->autinc_to)
2561 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2562 data->autinc_to = 1;
2563 data->explicit_inc_to = 1;
2566 if ( !data->autinc_to && CONSTANT_P (to_addr))
2567 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2570 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2572 /* First store what we can in the largest integer mode, then go to
2573 successively smaller modes. */
2575 while (max_size > 1 && data->len > 0)
2577 machine_mode mode = widest_int_mode_for_size (max_size);
2579 if (mode == VOIDmode)
2580 break;
2582 icode = optab_handler (mov_optab, mode);
2583 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2584 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2586 max_size = GET_MODE_SIZE (mode);
2589 /* The code above should have handled everything. */
2590 gcc_assert (!data->len);
2593 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2594 with move instructions for mode MODE. GENFUN is the gen_... function
2595 to make a move insn for that mode. DATA has all the other info. */
2597 static void
2598 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2599 struct store_by_pieces_d *data)
2601 unsigned int size = GET_MODE_SIZE (mode);
2602 rtx to1, cst;
2604 while (data->len >= size)
2606 if (data->reverse)
2607 data->offset -= size;
2609 if (data->autinc_to)
2610 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2611 data->offset);
2612 else
2613 to1 = adjust_address (data->to, mode, data->offset);
2615 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2616 emit_insn (gen_add2_insn (data->to_addr,
2617 gen_int_mode (-(HOST_WIDE_INT) size,
2618 GET_MODE (data->to_addr))));
2620 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2621 emit_insn ((*genfun) (to1, cst));
2623 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2624 emit_insn (gen_add2_insn (data->to_addr,
2625 gen_int_mode (size,
2626 GET_MODE (data->to_addr))));
2628 if (! data->reverse)
2629 data->offset += size;
2631 data->len -= size;
2635 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2636 its length in bytes. */
2639 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2640 unsigned int expected_align, HOST_WIDE_INT expected_size,
2641 unsigned HOST_WIDE_INT min_size,
2642 unsigned HOST_WIDE_INT max_size,
2643 unsigned HOST_WIDE_INT probable_max_size)
2645 machine_mode mode = GET_MODE (object);
2646 unsigned int align;
2648 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2650 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2651 just move a zero. Otherwise, do this a piece at a time. */
2652 if (mode != BLKmode
2653 && CONST_INT_P (size)
2654 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2656 rtx zero = CONST0_RTX (mode);
2657 if (zero != NULL)
2659 emit_move_insn (object, zero);
2660 return NULL;
2663 if (COMPLEX_MODE_P (mode))
2665 zero = CONST0_RTX (GET_MODE_INNER (mode));
2666 if (zero != NULL)
2668 write_complex_part (object, zero, 0);
2669 write_complex_part (object, zero, 1);
2670 return NULL;
2675 if (size == const0_rtx)
2676 return NULL;
2678 align = MEM_ALIGN (object);
2680 if (CONST_INT_P (size)
2681 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2682 CLEAR_BY_PIECES,
2683 optimize_insn_for_speed_p ()))
2684 clear_by_pieces (object, INTVAL (size), align);
2685 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2686 expected_align, expected_size,
2687 min_size, max_size, probable_max_size))
2689 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2690 return set_storage_via_libcall (object, size, const0_rtx,
2691 method == BLOCK_OP_TAILCALL);
2692 else
2693 gcc_unreachable ();
2695 return NULL;
2699 clear_storage (rtx object, rtx size, enum block_op_methods method)
2701 unsigned HOST_WIDE_INT max, min = 0;
2702 if (GET_CODE (size) == CONST_INT)
2703 min = max = UINTVAL (size);
2704 else
2705 max = GET_MODE_MASK (GET_MODE (size));
2706 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
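/* A minimal sketch of clearing a fixed-size block with the routine above;
   BUF is assumed to be a BLKmode MEM rtx of at least 64 bytes:

     clear_storage (buf, GEN_INT (64), BLOCK_OP_NORMAL);

   Depending on the size, alignment and target hooks this expands to
   clear_by_pieces, a setmem pattern, or a library call to memset.  */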
2710 /* A subroutine of clear_storage. Expand a call to memset.
2711 Return the return value of memset, 0 otherwise. */
2714 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2716 tree call_expr, fn, object_tree, size_tree, val_tree;
2717 machine_mode size_mode;
2719 object = copy_addr_to_reg (XEXP (object, 0));
2720 object_tree = make_tree (ptr_type_node, object);
2722 if (!CONST_INT_P (val))
2723 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2724 val_tree = make_tree (integer_type_node, val);
2726 size_mode = TYPE_MODE (sizetype);
2727 size = convert_to_mode (size_mode, size, 1);
2728 size = copy_to_mode_reg (size_mode, size);
2729 size_tree = make_tree (sizetype, size);
2731 /* It is incorrect to use the libcall calling conventions for calls to
2732 memset, because memset can be provided by the user. */
2733 fn = builtin_decl_implicit (BUILT_IN_MEMSET);
2734 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2735 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2737 return expand_call (call_expr, NULL_RTX, false);
2740 /* Expand a setmem pattern; return true if successful. */
2742 bool
2743 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2744 unsigned int expected_align, HOST_WIDE_INT expected_size,
2745 unsigned HOST_WIDE_INT min_size,
2746 unsigned HOST_WIDE_INT max_size,
2747 unsigned HOST_WIDE_INT probable_max_size)
2749 /* Try the most limited insn first, because there's no point
2750 including more than one in the machine description unless
2751 the more limited one has some advantage. */
2753 machine_mode mode;
2755 if (expected_align < align)
2756 expected_align = align;
2757 if (expected_size != -1)
2759 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2760 expected_size = max_size;
2761 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2762 expected_size = min_size;
2765 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2766 mode = GET_MODE_WIDER_MODE (mode))
2768 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2770 if (code != CODE_FOR_nothing
2771 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2772 here because if SIZE is less than the mode mask, as it is
2773 returned by the macro, it will definitely be less than the
2774 actual mode mask. Since SIZE is within the Pmode address
2775 space, we limit MODE to Pmode. */
2776 && ((CONST_INT_P (size)
2777 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2778 <= (GET_MODE_MASK (mode) >> 1)))
2779 || max_size <= (GET_MODE_MASK (mode) >> 1)
2780 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2782 struct expand_operand ops[9];
2783 unsigned int nops;
2785 nops = insn_data[(int) code].n_generator_args;
2786 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2788 create_fixed_operand (&ops[0], object);
2789 /* The check above guarantees that this size conversion is valid. */
2790 create_convert_operand_to (&ops[1], size, mode, true);
2791 create_convert_operand_from (&ops[2], val, byte_mode, true);
2792 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2793 if (nops >= 6)
2795 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2796 create_integer_operand (&ops[5], expected_size);
2798 if (nops >= 8)
2800 create_integer_operand (&ops[6], min_size);
2801 /* If we cannot represent the maximal size,
2802 pass the parameter as NULL. */
2803 if ((HOST_WIDE_INT) max_size != -1)
2804 create_integer_operand (&ops[7], max_size);
2805 else
2806 create_fixed_operand (&ops[7], NULL);
2808 if (nops == 9)
2810 /* If we cannot represent the maximal size,
2811 pass the parameter as NULL. */
2812 if ((HOST_WIDE_INT) probable_max_size != -1)
2813 create_integer_operand (&ops[8], probable_max_size);
2814 else
2815 create_fixed_operand (&ops[8], NULL);
2817 if (maybe_expand_insn (code, nops, ops))
2818 return true;
2822 return false;
2826 /* Write to one of the components of the complex value CPLX. Write VAL to
2827 the real part if IMAG_P is false, and the imaginary part if it's true. */
2829 void
2830 write_complex_part (rtx cplx, rtx val, bool imag_p)
2832 machine_mode cmode;
2833 machine_mode imode;
2834 unsigned ibitsize;
2836 if (GET_CODE (cplx) == CONCAT)
2838 emit_move_insn (XEXP (cplx, imag_p), val);
2839 return;
2842 cmode = GET_MODE (cplx);
2843 imode = GET_MODE_INNER (cmode);
2844 ibitsize = GET_MODE_BITSIZE (imode);
2846 /* For MEMs simplify_gen_subreg may generate an invalid new address
2847 because, e.g., the original address is considered mode-dependent
2848 by the target, which restricts simplify_subreg from invoking
2849 adjust_address_nv. Instead of preparing fallback support for an
2850 invalid address, we call adjust_address_nv directly. */
2851 if (MEM_P (cplx))
2853 emit_move_insn (adjust_address_nv (cplx, imode,
2854 imag_p ? GET_MODE_SIZE (imode) : 0),
2855 val);
2856 return;
2859 /* If the sub-object is at least word sized, then we know that subregging
2860 will work. This special case is important, since store_bit_field
2861 wants to operate on integer modes, and there's rarely an OImode to
2862 correspond to TCmode. */
2863 if (ibitsize >= BITS_PER_WORD
2864 /* For hard regs we have exact predicates. Assume we can split
2865 the original object if it spans an even number of hard regs.
2866 This special case is important for SCmode on 64-bit platforms
2867 where the natural size of floating-point regs is 32-bit. */
2868 || (REG_P (cplx)
2869 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2870 && REG_NREGS (cplx) % 2 == 0))
2872 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2873 imag_p ? GET_MODE_SIZE (imode) : 0);
2874 if (part)
2876 emit_move_insn (part, val);
2877 return;
2879 else
2880 /* simplify_gen_subreg may fail for sub-word MEMs. */
2881 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2884 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
2885 false);
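/* A sketch that swaps the real and imaginary parts of an SCmode value using
   the accessors above and below; CPLX is assumed to be a valid SCmode rtx.
   The real part is copied into a fresh pseudo first so that writing the real
   part of a CONCAT cannot clobber the value still to be stored:

     rtx re = force_reg (SFmode, read_complex_part (cplx, false));
     rtx im = read_complex_part (cplx, true);
     write_complex_part (cplx, im, false);
     write_complex_part (cplx, re, true);  */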
2888 /* Extract one of the components of the complex value CPLX. Extract the
2889 real part if IMAG_P is false, and the imaginary part if it's true. */
2892 read_complex_part (rtx cplx, bool imag_p)
2894 machine_mode cmode, imode;
2895 unsigned ibitsize;
2897 if (GET_CODE (cplx) == CONCAT)
2898 return XEXP (cplx, imag_p);
2900 cmode = GET_MODE (cplx);
2901 imode = GET_MODE_INNER (cmode);
2902 ibitsize = GET_MODE_BITSIZE (imode);
2904 /* Special case reads from complex constants that got spilled to memory. */
2905 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2907 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2908 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2910 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2911 if (CONSTANT_CLASS_P (part))
2912 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2916 /* For MEMs simplify_gen_subreg may generate an invalid new address
2917 because, e.g., the original address is considered mode-dependent
2918 by the target, which restricts simplify_subreg from invoking
2919 adjust_address_nv. Instead of preparing fallback support for an
2920 invalid address, we call adjust_address_nv directly. */
2921 if (MEM_P (cplx))
2922 return adjust_address_nv (cplx, imode,
2923 imag_p ? GET_MODE_SIZE (imode) : 0);
2925 /* If the sub-object is at least word sized, then we know that subregging
2926 will work. This special case is important, since extract_bit_field
2927 wants to operate on integer modes, and there's rarely an OImode to
2928 correspond to TCmode. */
2929 if (ibitsize >= BITS_PER_WORD
2930 /* For hard regs we have exact predicates. Assume we can split
2931 the original object if it spans an even number of hard regs.
2932 This special case is important for SCmode on 64-bit platforms
2933 where the natural size of floating-point regs is 32-bit. */
2934 || (REG_P (cplx)
2935 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2936 && REG_NREGS (cplx) % 2 == 0))
2938 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2939 imag_p ? GET_MODE_SIZE (imode) : 0);
2940 if (ret)
2941 return ret;
2942 else
2943 /* simplify_gen_subreg may fail for sub-word MEMs. */
2944 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2947 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2948 true, NULL_RTX, imode, imode, false);
2951 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2952 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2953 represented in NEW_MODE. If FORCE is true, this will never happen, as
2954 we'll force-create a SUBREG if needed. */
2956 static rtx
2957 emit_move_change_mode (machine_mode new_mode,
2958 machine_mode old_mode, rtx x, bool force)
2960 rtx ret;
2962 if (push_operand (x, GET_MODE (x)))
2964 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2965 MEM_COPY_ATTRIBUTES (ret, x);
2967 else if (MEM_P (x))
2969 /* We don't have to worry about changing the address since the
2970 size in bytes is supposed to be the same. */
2971 if (reload_in_progress)
2973 /* Copy the MEM to change the mode and move any
2974 substitutions from the old MEM to the new one. */
2975 ret = adjust_address_nv (x, new_mode, 0);
2976 copy_replacements (x, ret);
2978 else
2979 ret = adjust_address (x, new_mode, 0);
2981 else
2983 /* Note that we do want simplify_subreg's behavior of validating
2984 that the new mode is ok for a hard register. If we were to use
2985 simplify_gen_subreg, we would create the subreg, but would
2986 probably run into the target not being able to implement it. */
2987 /* Except, of course, when FORCE is true, when this is exactly what
2988 we want. Which is needed for CCmodes on some targets. */
2989 if (force)
2990 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2991 else
2992 ret = simplify_subreg (new_mode, x, old_mode, 0);
2995 return ret;
2998 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2999 an integer mode of the same size as MODE. Returns the instruction
3000 emitted, or NULL if such a move could not be generated. */
3002 static rtx_insn *
3003 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3005 machine_mode imode;
3006 enum insn_code code;
3008 /* There must exist a mode of the exact size we require. */
3009 imode = int_mode_for_mode (mode);
3010 if (imode == BLKmode)
3011 return NULL;
3013 /* The target must support moves in this mode. */
3014 code = optab_handler (mov_optab, imode);
3015 if (code == CODE_FOR_nothing)
3016 return NULL;
3018 x = emit_move_change_mode (imode, mode, x, force);
3019 if (x == NULL_RTX)
3020 return NULL;
3021 y = emit_move_change_mode (imode, mode, y, force);
3022 if (y == NULL_RTX)
3023 return NULL;
3024 return emit_insn (GEN_FCN (code) (x, y));
3027 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3028 Return an equivalent MEM that does not use an auto-increment. */
3031 emit_move_resolve_push (machine_mode mode, rtx x)
3033 enum rtx_code code = GET_CODE (XEXP (x, 0));
3034 HOST_WIDE_INT adjust;
3035 rtx temp;
3037 adjust = GET_MODE_SIZE (mode);
3038 #ifdef PUSH_ROUNDING
3039 adjust = PUSH_ROUNDING (adjust);
3040 #endif
3041 if (code == PRE_DEC || code == POST_DEC)
3042 adjust = -adjust;
3043 else if (code == PRE_MODIFY || code == POST_MODIFY)
3045 rtx expr = XEXP (XEXP (x, 0), 1);
3046 HOST_WIDE_INT val;
3048 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3049 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3050 val = INTVAL (XEXP (expr, 1));
3051 if (GET_CODE (expr) == MINUS)
3052 val = -val;
3053 gcc_assert (adjust == val || adjust == -val);
3054 adjust = val;
3057 /* Do not use anti_adjust_stack, since we don't want to update
3058 stack_pointer_delta. */
3059 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3060 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3061 0, OPTAB_LIB_WIDEN);
3062 if (temp != stack_pointer_rtx)
3063 emit_move_insn (stack_pointer_rtx, temp);
3065 switch (code)
3067 case PRE_INC:
3068 case PRE_DEC:
3069 case PRE_MODIFY:
3070 temp = stack_pointer_rtx;
3071 break;
3072 case POST_INC:
3073 case POST_DEC:
3074 case POST_MODIFY:
3075 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3076 break;
3077 default:
3078 gcc_unreachable ();
3081 return replace_equiv_address (x, temp);
3084 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3085 X is known to satisfy push_operand, and MODE is known to be complex.
3086 Returns the last instruction emitted. */
3088 rtx_insn *
3089 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3091 machine_mode submode = GET_MODE_INNER (mode);
3092 bool imag_first;
3094 #ifdef PUSH_ROUNDING
3095 unsigned int submodesize = GET_MODE_SIZE (submode);
3097 /* In case we output to the stack, but the size is smaller than the
3098 machine can push exactly, we need to use move instructions. */
3099 if (PUSH_ROUNDING (submodesize) != submodesize)
3101 x = emit_move_resolve_push (mode, x);
3102 return emit_move_insn (x, y);
3104 #endif
3106 /* Note that the real part always precedes the imag part in memory
3107 regardless of the machine's endianness. */
3108 switch (GET_CODE (XEXP (x, 0)))
3110 case PRE_DEC:
3111 case POST_DEC:
3112 imag_first = true;
3113 break;
3114 case PRE_INC:
3115 case POST_INC:
3116 imag_first = false;
3117 break;
3118 default:
3119 gcc_unreachable ();
3122 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3123 read_complex_part (y, imag_first));
3124 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3125 read_complex_part (y, !imag_first));
3128 /* A subroutine of emit_move_complex. Perform the move from Y to X
3129 via two moves of the parts. Returns the last instruction emitted. */
3131 rtx_insn *
3132 emit_move_complex_parts (rtx x, rtx y)
3134 /* Show the output dies here. This is necessary for SUBREGs
3135 of pseudos since we cannot track their lifetimes correctly;
3136 hard regs shouldn't appear here except as return values. */
3137 if (!reload_completed && !reload_in_progress
3138 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3139 emit_clobber (x);
3141 write_complex_part (x, read_complex_part (y, false), false);
3142 write_complex_part (x, read_complex_part (y, true), true);
3144 return get_last_insn ();
3147 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3148 MODE is known to be complex. Returns the last instruction emitted. */
3150 static rtx_insn *
3151 emit_move_complex (machine_mode mode, rtx x, rtx y)
3153 bool try_int;
3155 /* Need to take special care for pushes, to maintain proper ordering
3156 of the data, and possibly extra padding. */
3157 if (push_operand (x, mode))
3158 return emit_move_complex_push (mode, x, y);
3160 /* See if we can coerce the target into moving both values at once, except
3161 for floating point where we favor moving as parts if this is easy. */
3162 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3163 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3164 && !(REG_P (x)
3165 && HARD_REGISTER_P (x)
3166 && REG_NREGS (x) == 1)
3167 && !(REG_P (y)
3168 && HARD_REGISTER_P (y)
3169 && REG_NREGS (y) == 1))
3170 try_int = false;
3171 /* Not possible if the values are inherently not adjacent. */
3172 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3173 try_int = false;
3174 /* It is possible if both are registers (or subregs of registers). */
3175 else if (register_operand (x, mode) && register_operand (y, mode))
3176 try_int = true;
3177 /* If one of the operands is a memory, and alignment constraints
3178 are friendly enough, we may be able to do combined memory operations.
3179 We do not attempt this if Y is a constant because that combination is
3180 usually better handled by the by-parts copy below. */
3181 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3182 && (!STRICT_ALIGNMENT
3183 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3184 try_int = true;
3185 else
3186 try_int = false;
3188 if (try_int)
3190 rtx_insn *ret;
3192 /* For memory to memory moves, optimal behavior can be had with the
3193 existing block move logic. */
3194 if (MEM_P (x) && MEM_P (y))
3196 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3197 BLOCK_OP_NO_LIBCALL);
3198 return get_last_insn ();
3201 ret = emit_move_via_integer (mode, x, y, true);
3202 if (ret)
3203 return ret;
3206 return emit_move_complex_parts (x, y);
3209 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3210 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3212 static rtx_insn *
3213 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3215 rtx_insn *ret;
3217 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3218 if (mode != CCmode)
3220 enum insn_code code = optab_handler (mov_optab, CCmode);
3221 if (code != CODE_FOR_nothing)
3223 x = emit_move_change_mode (CCmode, mode, x, true);
3224 y = emit_move_change_mode (CCmode, mode, y, true);
3225 return emit_insn (GEN_FCN (code) (x, y));
3229 /* Otherwise, find the MODE_INT mode of the same width. */
3230 ret = emit_move_via_integer (mode, x, y, false);
3231 gcc_assert (ret != NULL);
3232 return ret;
3235 /* Return true if word I of OP lies entirely in the
3236 undefined bits of a paradoxical subreg. */
3238 static bool
3239 undefined_operand_subword_p (const_rtx op, int i)
3241 machine_mode innermode, innermostmode;
3242 int offset;
3243 if (GET_CODE (op) != SUBREG)
3244 return false;
3245 innermode = GET_MODE (op);
3246 innermostmode = GET_MODE (SUBREG_REG (op));
3247 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3248 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3249 memory, except for a paradoxical subreg where we define
3250 SUBREG_BYTE to be 0; undo this exception as in
3251 simplify_subreg. */
3252 if (SUBREG_BYTE (op) == 0
3253 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3255 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3256 if (WORDS_BIG_ENDIAN)
3257 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3258 if (BYTES_BIG_ENDIAN)
3259 offset += difference % UNITS_PER_WORD;
3261 if (offset >= GET_MODE_SIZE (innermostmode)
3262 || offset <= -GET_MODE_SIZE (word_mode))
3263 return true;
3264 return false;
3267 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3268 MODE is any multi-word or full-word mode that lacks a move_insn
3269 pattern. Note that you will get better code if you define such
3270 patterns, even if they must turn into multiple assembler instructions. */
3272 static rtx_insn *
3273 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3275 rtx_insn *last_insn = 0;
3276 rtx_insn *seq;
3277 rtx inner;
3278 bool need_clobber;
3279 int i;
3281 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3283 /* If X is a push on the stack, do the push now and replace
3284 X with a reference to the stack pointer. */
3285 if (push_operand (x, mode))
3286 x = emit_move_resolve_push (mode, x);
3288 /* If we are in reload, see if either operand is a MEM whose address
3289 is scheduled for replacement. */
3290 if (reload_in_progress && MEM_P (x)
3291 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3292 x = replace_equiv_address_nv (x, inner);
3293 if (reload_in_progress && MEM_P (y)
3294 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3295 y = replace_equiv_address_nv (y, inner);
3297 start_sequence ();
3299 need_clobber = false;
3300 for (i = 0;
3301 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3302 i++)
3304 rtx xpart = operand_subword (x, i, 1, mode);
3305 rtx ypart;
3307 /* Do not generate code for a move if it would come entirely
3308 from the undefined bits of a paradoxical subreg. */
3309 if (undefined_operand_subword_p (y, i))
3310 continue;
3312 ypart = operand_subword (y, i, 1, mode);
3314 /* If we can't get a part of Y, put Y into memory if it is a
3315 constant. Otherwise, force it into a register. Then we must
3316 be able to get a part of Y. */
3317 if (ypart == 0 && CONSTANT_P (y))
3319 y = use_anchored_address (force_const_mem (mode, y));
3320 ypart = operand_subword (y, i, 1, mode);
3322 else if (ypart == 0)
3323 ypart = operand_subword_force (y, i, mode);
3325 gcc_assert (xpart && ypart);
3327 need_clobber |= (GET_CODE (xpart) == SUBREG);
3329 last_insn = emit_move_insn (xpart, ypart);
3332 seq = get_insns ();
3333 end_sequence ();
3335 /* Show the output dies here. This is necessary for SUBREGs
3336 of pseudos since we cannot track their lifetimes correctly;
3337 hard regs shouldn't appear here except as return values.
3338 We never want to emit such a clobber after reload. */
3339 if (x != y
3340 && ! (reload_in_progress || reload_completed)
3341 && need_clobber != 0)
3342 emit_clobber (x);
3344 emit_insn (seq);
3346 return last_insn;
3349 /* Low level part of emit_move_insn.
3350 Called just like emit_move_insn, but assumes X and Y
3351 are basically valid. */
3353 rtx_insn *
3354 emit_move_insn_1 (rtx x, rtx y)
3356 machine_mode mode = GET_MODE (x);
3357 enum insn_code code;
3359 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3361 code = optab_handler (mov_optab, mode);
3362 if (code != CODE_FOR_nothing)
3363 return emit_insn (GEN_FCN (code) (x, y));
3365 /* Expand complex moves by moving real part and imag part. */
3366 if (COMPLEX_MODE_P (mode))
3367 return emit_move_complex (mode, x, y);
3369 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3370 || ALL_FIXED_POINT_MODE_P (mode))
3372 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3374 /* If we can't find an integer mode, use multi words. */
3375 if (result)
3376 return result;
3377 else
3378 return emit_move_multi_word (mode, x, y);
3381 if (GET_MODE_CLASS (mode) == MODE_CC)
3382 return emit_move_ccmode (mode, x, y);
3384 /* Try using a move pattern for the corresponding integer mode. This is
3385 only safe when simplify_subreg can convert MODE constants into integer
3386 constants. At present, it can only do this reliably if the value
3387 fits within a HOST_WIDE_INT. */
3388 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3390 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3392 if (ret)
3394 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3395 return ret;
3399 return emit_move_multi_word (mode, x, y);
3402 /* Generate code to copy Y into X.
3403 Both Y and X must have the same mode, except that
3404 Y can be a constant with VOIDmode.
3405 This mode cannot be BLKmode; use emit_block_move for that.
3407 Return the last instruction emitted. */
3409 rtx_insn *
3410 emit_move_insn (rtx x, rtx y)
3412 machine_mode mode = GET_MODE (x);
3413 rtx y_cst = NULL_RTX;
3414 rtx_insn *last_insn;
3415 rtx set;
3417 gcc_assert (mode != BLKmode
3418 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3420 if (CONSTANT_P (y))
3422 if (optimize
3423 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3424 && (last_insn = compress_float_constant (x, y)))
3425 return last_insn;
3427 y_cst = y;
3429 if (!targetm.legitimate_constant_p (mode, y))
3431 y = force_const_mem (mode, y);
3433 /* If the target's cannot_force_const_mem prevented the spill,
3434 assume that the target's move expanders will also take care
3435 of the non-legitimate constant. */
3436 if (!y)
3437 y = y_cst;
3438 else
3439 y = use_anchored_address (y);
3443 /* If X or Y are memory references, verify that their addresses are valid
3444 for the machine. */
3445 if (MEM_P (x)
3446 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3447 MEM_ADDR_SPACE (x))
3448 && ! push_operand (x, GET_MODE (x))))
3449 x = validize_mem (x);
3451 if (MEM_P (y)
3452 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3453 MEM_ADDR_SPACE (y)))
3454 y = validize_mem (y);
3456 gcc_assert (mode != BLKmode);
3458 last_insn = emit_move_insn_1 (x, y);
3460 if (y_cst && REG_P (x)
3461 && (set = single_set (last_insn)) != NULL_RTX
3462 && SET_DEST (set) == x
3463 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3464 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3466 return last_insn;
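/* A minimal sketch of the usual emit_move_insn idiom; MEM is assumed to be a
   valid SImode memory reference:

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, GEN_INT (42));
     emit_move_insn (mem, tmp);

   If the constant is not legitimate for the target it is spilled to the
   constant pool above, and a REG_EQUAL note recording the original value is
   attached to the move into the register.  */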
3469 /* Generate the body of an instruction to copy Y into X.
3470 It may be a list of insns, if one insn isn't enough. */
3472 rtx_insn *
3473 gen_move_insn (rtx x, rtx y)
3475 rtx_insn *seq;
3477 start_sequence ();
3478 emit_move_insn_1 (x, y);
3479 seq = get_insns ();
3480 end_sequence ();
3481 return seq;
3484 /* If Y is representable exactly in a narrower mode, and the target can
3485 perform the extension directly from constant or memory, then emit the
3486 move as an extension. */
3488 static rtx_insn *
3489 compress_float_constant (rtx x, rtx y)
3491 machine_mode dstmode = GET_MODE (x);
3492 machine_mode orig_srcmode = GET_MODE (y);
3493 machine_mode srcmode;
3494 const REAL_VALUE_TYPE *r;
3495 int oldcost, newcost;
3496 bool speed = optimize_insn_for_speed_p ();
3498 r = CONST_DOUBLE_REAL_VALUE (y);
3500 if (targetm.legitimate_constant_p (dstmode, y))
3501 oldcost = set_src_cost (y, orig_srcmode, speed);
3502 else
3503 oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3505 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3506 srcmode != orig_srcmode;
3507 srcmode = GET_MODE_WIDER_MODE (srcmode))
3509 enum insn_code ic;
3510 rtx trunc_y;
3511 rtx_insn *last_insn;
3513 /* Skip if the target can't extend this way. */
3514 ic = can_extend_p (dstmode, srcmode, 0);
3515 if (ic == CODE_FOR_nothing)
3516 continue;
3518 /* Skip if the narrowed value isn't exact. */
3519 if (! exact_real_truncate (srcmode, r))
3520 continue;
3522 trunc_y = const_double_from_real_value (*r, srcmode);
3524 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3526 /* Skip if the target needs extra instructions to perform
3527 the extension. */
3528 if (!insn_operand_matches (ic, 1, trunc_y))
3529 continue;
3530 /* This is valid, but may not be cheaper than the original. */
3531 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3532 dstmode, speed);
3533 if (oldcost < newcost)
3534 continue;
3536 else if (float_extend_from_mem[dstmode][srcmode])
3538 trunc_y = force_const_mem (srcmode, trunc_y);
3539 /* This is valid, but may not be cheaper than the original. */
3540 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3541 dstmode, speed);
3542 if (oldcost < newcost)
3543 continue;
3544 trunc_y = validize_mem (trunc_y);
3546 else
3547 continue;
3549 /* For CSE's benefit, force the compressed constant pool entry
3550 into a new pseudo. This constant may be used in different modes,
3551 and if not, combine will put things back together for us. */
3552 trunc_y = force_reg (srcmode, trunc_y);
3554 /* If x is a hard register, perform the extension into a pseudo,
3555 so that e.g. stack realignment code is aware of it. */
3556 rtx target = x;
3557 if (REG_P (x) && HARD_REGISTER_P (x))
3558 target = gen_reg_rtx (dstmode);
3560 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3561 last_insn = get_last_insn ();
3563 if (REG_P (target))
3564 set_unique_reg_note (last_insn, REG_EQUAL, y);
3566 if (target != x)
3567 return emit_move_insn (x, target);
3568 return last_insn;
3571 return NULL;
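/* A worked example of the transformation above, assuming a target with an
   extendsfdf2 pattern: for the DFmode constant 1.5, which is exactly
   representable in SFmode, the move

     (set (reg:DF d) (const_double:DF 1.5))

   can instead be emitted as a float_extend of the SFmode constant (or of an
   SFmode constant-pool entry), provided the cost estimate does not come out
   worse than loading the DFmode constant directly.  */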
3574 /* Pushing data onto the stack. */
3576 /* Push a block of length SIZE (perhaps variable)
3577 and return an rtx to address the beginning of the block.
3578 The value may be virtual_outgoing_args_rtx.
3580 EXTRA is the number of bytes of padding to push in addition to SIZE.
3581 BELOW nonzero means this padding comes at low addresses;
3582 otherwise, the padding comes at high addresses. */
3585 push_block (rtx size, int extra, int below)
3587 rtx temp;
3589 size = convert_modes (Pmode, ptr_mode, size, 1);
3590 if (CONSTANT_P (size))
3591 anti_adjust_stack (plus_constant (Pmode, size, extra));
3592 else if (REG_P (size) && extra == 0)
3593 anti_adjust_stack (size);
3594 else
3596 temp = copy_to_mode_reg (Pmode, size);
3597 if (extra != 0)
3598 temp = expand_binop (Pmode, add_optab, temp,
3599 gen_int_mode (extra, Pmode),
3600 temp, 0, OPTAB_LIB_WIDEN);
3601 anti_adjust_stack (temp);
3604 if (STACK_GROWS_DOWNWARD)
3606 temp = virtual_outgoing_args_rtx;
3607 if (extra != 0 && below)
3608 temp = plus_constant (Pmode, temp, extra);
3610 else
3612 if (CONST_INT_P (size))
3613 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3614 -INTVAL (size) - (below ? 0 : extra));
3615 else if (extra != 0 && !below)
3616 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3617 negate_rtx (Pmode, plus_constant (Pmode, size,
3618 extra)));
3619 else
3620 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3621 negate_rtx (Pmode, size));
3624 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3627 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3629 static rtx
3630 mem_autoinc_base (rtx mem)
3632 if (MEM_P (mem))
3634 rtx addr = XEXP (mem, 0);
3635 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3636 return XEXP (addr, 0);
3638 return NULL;
3641 /* A utility routine used here, in reload, and in try_split. The insns
3642 after PREV up to and including LAST are known to adjust the stack,
3643 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3644 placing notes as appropriate. PREV may be NULL, indicating the
3645 entire insn sequence prior to LAST should be scanned.
3647 The set of allowed stack pointer modifications is small:
3648 (1) One or more auto-inc style memory references (aka pushes),
3649 (2) One or more addition/subtraction with the SP as destination,
3650 (3) A single move insn with the SP as destination,
3651 (4) A call_pop insn,
3652 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3654 Insns in the sequence that do not modify the SP are ignored,
3655 except for noreturn calls.
3657 The return value is the amount of adjustment that can be trivially
3658 verified, via immediate operand or auto-inc. If the adjustment
3659 cannot be trivially extracted, the return value is INT_MIN. */
3661 HOST_WIDE_INT
3662 find_args_size_adjust (rtx_insn *insn)
3664 rtx dest, set, pat;
3665 int i;
3667 pat = PATTERN (insn);
3668 set = NULL;
3670 /* Look for a call_pop pattern. */
3671 if (CALL_P (insn))
3673 /* We have to allow non-call_pop patterns for the case
3674 of emit_single_push_insn of a TLS address. */
3675 if (GET_CODE (pat) != PARALLEL)
3676 return 0;
3678 /* All call_pop have a stack pointer adjust in the parallel.
3679 The call itself is always first, and the stack adjust is
3680 usually last, so search from the end. */
3681 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3683 set = XVECEXP (pat, 0, i);
3684 if (GET_CODE (set) != SET)
3685 continue;
3686 dest = SET_DEST (set);
3687 if (dest == stack_pointer_rtx)
3688 break;
3690 /* We'd better have found the stack pointer adjust. */
3691 if (i == 0)
3692 return 0;
3693 /* Fall through to process the extracted SET and DEST
3694 as if it was a standalone insn. */
3696 else if (GET_CODE (pat) == SET)
3697 set = pat;
3698 else if ((set = single_set (insn)) != NULL)
3700 else if (GET_CODE (pat) == PARALLEL)
3702 /* ??? Some older ports use a parallel with a stack adjust
3703 and a store for a PUSH_ROUNDING pattern, rather than a
3704 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3705 /* ??? See h8300 and m68k, pushqi1. */
3706 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3708 set = XVECEXP (pat, 0, i);
3709 if (GET_CODE (set) != SET)
3710 continue;
3711 dest = SET_DEST (set);
3712 if (dest == stack_pointer_rtx)
3713 break;
3715 /* We do not expect an auto-inc of the sp in the parallel. */
3716 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3717 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3718 != stack_pointer_rtx);
3720 if (i < 0)
3721 return 0;
3723 else
3724 return 0;
3726 dest = SET_DEST (set);
3728 /* Look for direct modifications of the stack pointer. */
3729 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3731 /* Look for a trivial adjustment, otherwise assume nothing. */
3732 /* Note that the SPU restore_stack_block pattern refers to
3733 the stack pointer in V4SImode. Consider that non-trivial. */
3734 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3735 && GET_CODE (SET_SRC (set)) == PLUS
3736 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3737 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3738 return INTVAL (XEXP (SET_SRC (set), 1));
3739 /* ??? Reload can generate no-op moves, which will be cleaned
3740 up later. Recognize it and continue searching. */
3741 else if (rtx_equal_p (dest, SET_SRC (set)))
3742 return 0;
3743 else
3744 return HOST_WIDE_INT_MIN;
3746 else
3748 rtx mem, addr;
3750 /* Otherwise only think about autoinc patterns. */
3751 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3753 mem = dest;
3754 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3755 != stack_pointer_rtx);
3757 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3758 mem = SET_SRC (set);
3759 else
3760 return 0;
3762 addr = XEXP (mem, 0);
3763 switch (GET_CODE (addr))
3765 case PRE_INC:
3766 case POST_INC:
3767 return GET_MODE_SIZE (GET_MODE (mem));
3768 case PRE_DEC:
3769 case POST_DEC:
3770 return -GET_MODE_SIZE (GET_MODE (mem));
3771 case PRE_MODIFY:
3772 case POST_MODIFY:
3773 addr = XEXP (addr, 1);
3774 gcc_assert (GET_CODE (addr) == PLUS);
3775 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3776 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3777 return INTVAL (XEXP (addr, 1));
3778 default:
3779 gcc_unreachable ();
3785 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3787 int args_size = end_args_size;
3788 bool saw_unknown = false;
3789 rtx_insn *insn;
3791 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3793 HOST_WIDE_INT this_delta;
3795 if (!NONDEBUG_INSN_P (insn))
3796 continue;
3798 this_delta = find_args_size_adjust (insn);
3799 if (this_delta == 0)
3801 if (!CALL_P (insn)
3802 || ACCUMULATE_OUTGOING_ARGS
3803 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3804 continue;
3807 gcc_assert (!saw_unknown);
3808 if (this_delta == HOST_WIDE_INT_MIN)
3809 saw_unknown = true;
3811 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3812 if (STACK_GROWS_DOWNWARD)
3813 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3815 args_size -= this_delta;
3818 return saw_unknown ? INT_MIN : args_size;
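/* A minimal standalone sketch of the REG_ARGS_SIZE bookkeeping performed
   by fixup_args_size_notes above, assuming a downward-growing stack.  The
   sketch_* names, the delta array and the numbers are hypothetical; the
   real routine reads each delta from the insn stream via
   find_args_size_adjust.  */

static void
sketch_fixup_args_size_notes (const int *sp_delta, int n_insns,
                              int end_args_size, int *note)
{
  int args_size = end_args_size;

  /* Walk backward from the last insn; NOTE[i] plays the role of the
     REG_ARGS_SIZE note attached to insn I.  */
  for (int i = n_insns - 1; i >= 0; i--)
    {
      note[i] = args_size;
      /* On a downward-growing stack an SP decrease of D bytes grows the
         outstanding argument area by D bytes, hence the negation.  */
      args_size -= -sp_delta[i];
    }
}

/* Example: sp_delta = { -8, -4 } (pushes of 8 then 4 bytes) with
   end_args_size = 12 yields notes { 8, 12 }, and 0 bytes outstanding
   before the first push.  */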
3821 #ifdef PUSH_ROUNDING
3822 /* Emit a single push insn. */
3824 static void
3825 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
3827 rtx dest_addr;
3828 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3829 rtx dest;
3830 enum insn_code icode;
3832 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3833 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3834 a MEM representing the push operation to the move expander. */
3835 icode = optab_handler (push_optab, mode);
3836 if (icode != CODE_FOR_nothing)
3838 struct expand_operand ops[1];
3840 create_input_operand (&ops[0], x, mode);
3841 if (maybe_expand_insn (icode, 1, ops))
3842 return;
3844 if (GET_MODE_SIZE (mode) == rounded_size)
3845 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3846 /* If we are to pad downward, adjust the stack pointer first and
3847 then store X into the stack location using an offset. This is
3848 because emit_move_insn does not know how to pad; it does not have
3849 access to type. */
3850 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3852 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3853 HOST_WIDE_INT offset;
3855 emit_move_insn (stack_pointer_rtx,
3856 expand_binop (Pmode,
3857 STACK_GROWS_DOWNWARD ? sub_optab
3858 : add_optab,
3859 stack_pointer_rtx,
3860 gen_int_mode (rounded_size, Pmode),
3861 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3863 offset = (HOST_WIDE_INT) padding_size;
3864 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
3865 /* We have already decremented the stack pointer, so get the
3866 previous value. */
3867 offset += (HOST_WIDE_INT) rounded_size;
3869 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
3870 /* We have already incremented the stack pointer, so get the
3871 previous value. */
3872 offset -= (HOST_WIDE_INT) rounded_size;
3874 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3875 gen_int_mode (offset, Pmode));
3877 else
3879 if (STACK_GROWS_DOWNWARD)
3880 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3881 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3882 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
3883 Pmode));
3884 else
3885 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3886 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3887 gen_int_mode (rounded_size, Pmode));
3889 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3892 dest = gen_rtx_MEM (mode, dest_addr);
3894 if (type != 0)
3896 set_mem_attributes (dest, type, 1);
3898 if (cfun->tail_call_marked)
3899 /* Function incoming arguments may overlap with sibling call
3900 outgoing arguments and we cannot allow reordering of reads
3901 from function arguments with stores to outgoing arguments
3902 of sibling calls. */
3903 set_mem_alias_set (dest, 0);
3905 emit_move_insn (dest, x);
3908 /* Emit and annotate a single push insn. */
3910 static void
3911 emit_single_push_insn (machine_mode mode, rtx x, tree type)
3913 int delta, old_delta = stack_pointer_delta;
3914 rtx_insn *prev = get_last_insn ();
3915 rtx_insn *last;
3917 emit_single_push_insn_1 (mode, x, type);
3919 last = get_last_insn ();
3921 /* Notice the common case where we emitted exactly one insn. */
3922 if (PREV_INSN (last) == prev)
3924 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3925 return;
3928 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
3929 gcc_assert (delta == INT_MIN || delta == old_delta);
3931 #endif
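/* A rough standalone illustration of the rounded_size / padding_size
   arithmetic in emit_single_push_insn_1 above, assuming a hypothetical
   target whose PUSH_ROUNDING rounds every push up to a multiple of 4
   bytes; real targets define PUSH_ROUNDING themselves.  */

static void
sketch_push_padding (unsigned mode_size, unsigned *rounded_size,
                     unsigned *padding_size)
{
  *rounded_size = (mode_size + 3) & ~3u;   /* hypothetical 4-byte rounding */
  /* When the argument is padded downward, the stack pointer is adjusted
     by *ROUNDED_SIZE first and the value is then stored *PADDING_SIZE
     bytes away from the new stack pointer (modulo the POST_DEC/POST_INC
     corrections made in the code above).  */
  *padding_size = *rounded_size - mode_size;
  /* e.g. mode_size == 3 gives rounded_size == 4 and padding_size == 1.  */
}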
3933 /* If reading SIZE bytes from X will end up reading from
3934 Y, return the number of bytes that overlap. Return -1
3935 if there is no overlap, or -2 if the overlap cannot be determined
3936 (for example when X and Y have different base registers). */
3938 static int
3939 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
3941 rtx tmp = plus_constant (Pmode, x, size);
3942 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
3944 if (!CONST_INT_P (sub))
3945 return -2;
3947 HOST_WIDE_INT val = INTVAL (sub);
3949 return IN_RANGE (val, 1, size) ? val : -1;
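/* A standalone numeric illustration of the test above, using plain
   integers in place of RTL addresses; the sketch_* name is hypothetical.
   Reading SIZE bytes starting at X overlaps Y exactly when X + SIZE - Y
   lands in [1, SIZE], and that difference is the number of bytes read
   from Y onward.  */

static int
sketch_memory_load_overlap (long x, long y, long size)
{
  long val = x + size - y;
  return (val >= 1 && val <= size) ? (int) val : -1;
}

/* Example: x = 100, y = 104, size = 8 reads bytes 100..107, so bytes
   104..107 overlap and the result is 4; with y = 108 there is no overlap
   and the result is -1.  */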
3952 /* Generate code to push X onto the stack, assuming it has mode MODE and
3953 type TYPE.
3954 MODE is redundant except when X is a CONST_INT (since they don't
3955 carry mode info).
3956 SIZE is an rtx for the size of data to be copied (in bytes),
3957 needed only if X is BLKmode.
3958 Return true if successful. May return false if asked to push a
3959 partial argument during a sibcall optimization (as specified by
3960 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
3961 to not overlap.
3963 ALIGN (in bits) is maximum alignment we can assume.
3965 If PARTIAL and REG are both nonzero, then copy that many of the first
3966 bytes of X into registers starting with REG, and push the rest of X.
3967 The amount of space pushed is decreased by PARTIAL bytes.
3968 REG must be a hard register in this case.
3969 If REG is zero but PARTIAL is not, take all other actions one would for an
3970 argument partially in registers, but do not actually load any
3971 registers.
3973 EXTRA is the amount in bytes of extra space to leave next to this arg.
3974 This is ignored if an argument block has already been allocated.
3976 On a machine that lacks real push insns, ARGS_ADDR is the address of
3977 the bottom of the argument block for this call. We use indexing off there
3978 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3979 argument block has not been preallocated.
3981 ARGS_SO_FAR is the size of args previously pushed for this call.
3983 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3984 for arguments passed in registers. If nonzero, it will be the number
3985 of bytes required. */
3987 bool
3988 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
3989 unsigned int align, int partial, rtx reg, int extra,
3990 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3991 rtx alignment_pad, bool sibcall_p)
3993 rtx xinner;
3994 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
3996 /* Decide where to pad the argument: `downward' for below,
3997 `upward' for above, or `none' for don't pad it.
3998 Default is below for small data on big-endian machines; else above. */
3999 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4001 /* Invert direction if stack is post-decrement.
4002 FIXME: why? */
4003 if (STACK_PUSH_CODE == POST_DEC)
4004 if (where_pad != none)
4005 where_pad = (where_pad == downward ? upward : downward);
4007 xinner = x;
4009 int nregs = partial / UNITS_PER_WORD;
4010 rtx *tmp_regs = NULL;
4011 int overlapping = 0;
4013 if (mode == BLKmode
4014 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4016 /* Copy a block into the stack, entirely or partially. */
4018 rtx temp;
4019 int used;
4020 int offset;
4021 int skip;
4023 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4024 used = partial - offset;
4026 if (mode != BLKmode)
4028 /* A value is to be stored in an insufficiently aligned
4029 stack slot; copy via a suitably aligned slot if
4030 necessary. */
4031 size = GEN_INT (GET_MODE_SIZE (mode));
4032 if (!MEM_P (xinner))
4034 temp = assign_temp (type, 1, 1);
4035 emit_move_insn (temp, xinner);
4036 xinner = temp;
4040 gcc_assert (size);
4042 /* USED is now the # of bytes we need not copy to the stack
4043 because registers will take care of them. */
4045 if (partial != 0)
4046 xinner = adjust_address (xinner, BLKmode, used);
4048 /* If the partial register-part of the arg counts in its stack size,
4049 skip the part of stack space corresponding to the registers.
4050 Otherwise, start copying to the beginning of the stack space,
4051 by setting SKIP to 0. */
4052 skip = (reg_parm_stack_space == 0) ? 0 : used;
4054 #ifdef PUSH_ROUNDING
4055 /* Do it with several push insns if that doesn't take lots of insns
4056 and if there is no difficulty with push insns that skip bytes
4057 on the stack for alignment purposes. */
4058 if (args_addr == 0
4059 && PUSH_ARGS
4060 && CONST_INT_P (size)
4061 && skip == 0
4062 && MEM_ALIGN (xinner) >= align
4063 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4064 /* Here we avoid the case of a structure whose weak alignment
4065 forces many pushes of a small amount of data,
4066 and such small pushes do rounding that causes trouble. */
4067 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4068 || align >= BIGGEST_ALIGNMENT
4069 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4070 == (align / BITS_PER_UNIT)))
4071 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4073 /* Push padding now if padding above and stack grows down,
4074 or if padding below and stack grows up.
4075 But if space already allocated, this has already been done. */
4076 if (extra && args_addr == 0
4077 && where_pad != none && where_pad != stack_direction)
4078 anti_adjust_stack (GEN_INT (extra));
4080 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4082 else
4083 #endif /* PUSH_ROUNDING */
4085 rtx target;
4087 /* Otherwise make space on the stack and copy the data
4088 to the address of that space. */
4090 /* Deduct words put into registers from the size we must copy. */
4091 if (partial != 0)
4093 if (CONST_INT_P (size))
4094 size = GEN_INT (INTVAL (size) - used);
4095 else
4096 size = expand_binop (GET_MODE (size), sub_optab, size,
4097 gen_int_mode (used, GET_MODE (size)),
4098 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4101 /* Get the address of the stack space.
4102 In this case, we do not deal with EXTRA separately.
4103 A single stack adjust will do. */
4104 if (! args_addr)
4106 temp = push_block (size, extra, where_pad == downward);
4107 extra = 0;
4109 else if (CONST_INT_P (args_so_far))
4110 temp = memory_address (BLKmode,
4111 plus_constant (Pmode, args_addr,
4112 skip + INTVAL (args_so_far)));
4113 else
4114 temp = memory_address (BLKmode,
4115 plus_constant (Pmode,
4116 gen_rtx_PLUS (Pmode,
4117 args_addr,
4118 args_so_far),
4119 skip));
4121 if (!ACCUMULATE_OUTGOING_ARGS)
4123 /* If the source is referenced relative to the stack pointer,
4124 copy it to another register to stabilize it. We do not need
4125 to do this if we know that we won't be changing sp. */
4127 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4128 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4129 temp = copy_to_reg (temp);
4132 target = gen_rtx_MEM (BLKmode, temp);
4134 /* We do *not* set_mem_attributes here, because incoming arguments
4135 may overlap with sibling call outgoing arguments and we cannot
4136 allow reordering of reads from function arguments with stores
4137 to outgoing arguments of sibling calls. We do, however, want
4138 to record the alignment of the stack slot. */
4139 /* ALIGN may well be better aligned than TYPE, e.g. due to
4140 PARM_BOUNDARY. Assume the caller isn't lying. */
4141 set_mem_align (target, align);
4143 /* If part should go in registers and pushing to that part would
4144 overwrite some of the values that need to go into regs, load the
4145 overlapping values into temporary pseudos to be moved into the hard
4146 regs at the end after the stack pushing has completed.
4147 We cannot load them directly into the hard regs here because
4148 they can be clobbered by the block move expansions.
4149 See PR 65358. */
4151 if (partial > 0 && reg != 0 && mode == BLKmode
4152 && GET_CODE (reg) != PARALLEL)
4154 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4155 if (overlapping > 0)
4157 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4158 overlapping /= UNITS_PER_WORD;
4160 tmp_regs = XALLOCAVEC (rtx, overlapping);
4162 for (int i = 0; i < overlapping; i++)
4163 tmp_regs[i] = gen_reg_rtx (word_mode);
4165 for (int i = 0; i < overlapping; i++)
4166 emit_move_insn (tmp_regs[i],
4167 operand_subword_force (target, i, mode));
4169 else if (overlapping == -1)
4170 overlapping = 0;
4171 /* Could not determine whether there is overlap.
4172 Fail the sibcall. */
4173 else
4175 overlapping = 0;
4176 if (sibcall_p)
4177 return false;
4180 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4183 else if (partial > 0)
4185 /* Scalar partly in registers. */
4187 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4188 int i;
4189 int not_stack;
4190 /* # bytes of start of argument
4191 that we must make space for but need not store. */
4192 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4193 int args_offset = INTVAL (args_so_far);
4194 int skip;
4196 /* Push padding now if padding above and stack grows down,
4197 or if padding below and stack grows up.
4198 But if space already allocated, this has already been done. */
4199 if (extra && args_addr == 0
4200 && where_pad != none && where_pad != stack_direction)
4201 anti_adjust_stack (GEN_INT (extra));
4203 /* If we make space by pushing it, we might as well push
4204 the real data. Otherwise, we can leave OFFSET nonzero
4205 and leave the space uninitialized. */
4206 if (args_addr == 0)
4207 offset = 0;
4209 /* Now NOT_STACK gets the number of words that we don't need to
4210 allocate on the stack. Convert OFFSET to words too. */
4211 not_stack = (partial - offset) / UNITS_PER_WORD;
4212 offset /= UNITS_PER_WORD;
4214 /* If the partial register-part of the arg counts in its stack size,
4215 skip the part of stack space corresponding to the registers.
4216 Otherwise, start copying to the beginning of the stack space,
4217 by setting SKIP to 0. */
4218 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4220 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4221 x = validize_mem (force_const_mem (mode, x));
4223 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4224 SUBREGs of such registers are not allowed. */
4225 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4226 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4227 x = copy_to_reg (x);
4229 /* Loop over all the words allocated on the stack for this arg. */
4230 /* We can do it by words, because any scalar bigger than a word
4231 has a size a multiple of a word. */
4232 for (i = size - 1; i >= not_stack; i--)
4233 if (i >= not_stack + offset)
4234 if (!emit_push_insn (operand_subword_force (x, i, mode),
4235 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4236 0, args_addr,
4237 GEN_INT (args_offset + ((i - not_stack + skip)
4238 * UNITS_PER_WORD)),
4239 reg_parm_stack_space, alignment_pad, sibcall_p))
4240 return false;
4242 else
4244 rtx addr;
4245 rtx dest;
4247 /* Push padding now if padding above and stack grows down,
4248 or if padding below and stack grows up.
4249 But if space already allocated, this has already been done. */
4250 if (extra && args_addr == 0
4251 && where_pad != none && where_pad != stack_direction)
4252 anti_adjust_stack (GEN_INT (extra));
4254 #ifdef PUSH_ROUNDING
4255 if (args_addr == 0 && PUSH_ARGS)
4256 emit_single_push_insn (mode, x, type);
4257 else
4258 #endif
4260 if (CONST_INT_P (args_so_far))
4261 addr
4262 = memory_address (mode,
4263 plus_constant (Pmode, args_addr,
4264 INTVAL (args_so_far)));
4265 else
4266 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4267 args_so_far));
4268 dest = gen_rtx_MEM (mode, addr);
4270 /* We do *not* set_mem_attributes here, because incoming arguments
4271 may overlap with sibling call outgoing arguments and we cannot
4272 allow reordering of reads from function arguments with stores
4273 to outgoing arguments of sibling calls. We do, however, want
4274 to record the alignment of the stack slot. */
4275 /* ALIGN may well be better aligned than TYPE, e.g. due to
4276 PARM_BOUNDARY. Assume the caller isn't lying. */
4277 set_mem_align (dest, align);
4279 emit_move_insn (dest, x);
4283 /* Move the partial arguments into the registers and any overlapping
4284 values that we moved into the pseudos in tmp_regs. */
4285 if (partial > 0 && reg != 0)
4287 /* Handle calls that pass values in multiple non-contiguous locations.
4288 The Irix 6 ABI has examples of this. */
4289 if (GET_CODE (reg) == PARALLEL)
4290 emit_group_load (reg, x, type, -1);
4291 else
4293 gcc_assert (partial % UNITS_PER_WORD == 0);
4294 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4296 for (int i = 0; i < overlapping; i++)
4297 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4298 + nregs - overlapping + i),
4299 tmp_regs[i]);
4304 if (extra && args_addr == 0 && where_pad == stack_direction)
4305 anti_adjust_stack (GEN_INT (extra));
4307 if (alignment_pad && args_addr == 0)
4308 anti_adjust_stack (alignment_pad);
4310 return true;
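/* A small standalone model of the PARTIAL bookkeeping used in
   emit_push_insn above, under the hypothetical parameters
   PARM_BOUNDARY == 64 bits and UNITS_PER_WORD == 8 bytes.  OFFSET is the
   sub-boundary remainder that still occupies stack space, USED is what
   the registers cover, and NOT_STACK is the number of whole words that
   need no stack slot of their own.  */

static void
sketch_partial_split (int partial, int *used, int *not_stack)
{
  const int parm_boundary_bytes = 64 / 8;   /* hypothetical */
  const int units_per_word = 8;             /* hypothetical */

  int offset = partial % parm_boundary_bytes;
  *used = partial - offset;
  *not_stack = (partial - offset) / units_per_word;
  /* e.g. partial == 12 gives offset == 4, used == 8, not_stack == 1.  */
}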
4313 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4314 operations. */
4316 static rtx
4317 get_subtarget (rtx x)
4319 return (optimize
4320 || x == 0
4321 /* Only registers can be subtargets. */
4322 || !REG_P (x)
4323 /* Don't use hard regs to avoid extending their life. */
4324 || REGNO (x) < FIRST_PSEUDO_REGISTER
4325 ? 0 : x);
4328 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4329 FIELD is a bitfield. Returns true if the optimization was successful,
4330 and there's nothing else to do. */
4332 static bool
4333 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4334 unsigned HOST_WIDE_INT bitpos,
4335 unsigned HOST_WIDE_INT bitregion_start,
4336 unsigned HOST_WIDE_INT bitregion_end,
4337 machine_mode mode1, rtx str_rtx,
4338 tree to, tree src, bool reverse)
4340 machine_mode str_mode = GET_MODE (str_rtx);
4341 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4342 tree op0, op1;
4343 rtx value, result;
4344 optab binop;
4345 gimple *srcstmt;
4346 enum tree_code code;
4348 if (mode1 != VOIDmode
4349 || bitsize >= BITS_PER_WORD
4350 || str_bitsize > BITS_PER_WORD
4351 || TREE_SIDE_EFFECTS (to)
4352 || TREE_THIS_VOLATILE (to))
4353 return false;
4355 STRIP_NOPS (src);
4356 if (TREE_CODE (src) != SSA_NAME)
4357 return false;
4358 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4359 return false;
4361 srcstmt = get_gimple_for_ssa_name (src);
4362 if (!srcstmt
4363 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4364 return false;
4366 code = gimple_assign_rhs_code (srcstmt);
4368 op0 = gimple_assign_rhs1 (srcstmt);
4370 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4371 to find its initialization. Hopefully the initialization will
4372 be from a bitfield load. */
4373 if (TREE_CODE (op0) == SSA_NAME)
4375 gimple *op0stmt = get_gimple_for_ssa_name (op0);
4377 /* We want to eventually have OP0 be the same as TO, which
4378 should be a bitfield. */
4379 if (!op0stmt
4380 || !is_gimple_assign (op0stmt)
4381 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4382 return false;
4383 op0 = gimple_assign_rhs1 (op0stmt);
4386 op1 = gimple_assign_rhs2 (srcstmt);
4388 if (!operand_equal_p (to, op0, 0))
4389 return false;
4391 if (MEM_P (str_rtx))
4393 unsigned HOST_WIDE_INT offset1;
4395 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4396 str_mode = word_mode;
4397 str_mode = get_best_mode (bitsize, bitpos,
4398 bitregion_start, bitregion_end,
4399 MEM_ALIGN (str_rtx), str_mode, 0);
4400 if (str_mode == VOIDmode)
4401 return false;
4402 str_bitsize = GET_MODE_BITSIZE (str_mode);
4404 offset1 = bitpos;
4405 bitpos %= str_bitsize;
4406 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4407 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4409 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4410 return false;
4411 else
4412 gcc_assert (!reverse);
4414 /* If the bit field covers the whole REG/MEM, store_field
4415 will likely generate better code. */
4416 if (bitsize >= str_bitsize)
4417 return false;
4419 /* We can't handle fields split across multiple entities. */
4420 if (bitpos + bitsize > str_bitsize)
4421 return false;
4423 if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4424 bitpos = str_bitsize - bitpos - bitsize;
4426 switch (code)
4428 case PLUS_EXPR:
4429 case MINUS_EXPR:
4430 /* For now, just optimize the case of the topmost bitfield
4431 where we don't need to do any masking and also
4432 1-bit bitfields where xor can be used.
4433 We might win by one instruction for the other bitfields
4434 too if insv/extv instructions aren't used, so that
4435 can be added later. */
4436 if ((reverse || bitpos + bitsize != str_bitsize)
4437 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4438 break;
4440 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4441 value = convert_modes (str_mode,
4442 TYPE_MODE (TREE_TYPE (op1)), value,
4443 TYPE_UNSIGNED (TREE_TYPE (op1)));
4445 /* We may be accessing data outside the field, which means
4446 we can alias adjacent data. */
4447 if (MEM_P (str_rtx))
4449 str_rtx = shallow_copy_rtx (str_rtx);
4450 set_mem_alias_set (str_rtx, 0);
4451 set_mem_expr (str_rtx, 0);
4454 if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4456 value = expand_and (str_mode, value, const1_rtx, NULL);
4457 binop = xor_optab;
4459 else
4460 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4462 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4463 if (reverse)
4464 value = flip_storage_order (str_mode, value);
4465 result = expand_binop (str_mode, binop, str_rtx,
4466 value, str_rtx, 1, OPTAB_WIDEN);
4467 if (result != str_rtx)
4468 emit_move_insn (str_rtx, result);
4469 return true;
4471 case BIT_IOR_EXPR:
4472 case BIT_XOR_EXPR:
4473 if (TREE_CODE (op1) != INTEGER_CST)
4474 break;
4475 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4476 value = convert_modes (str_mode,
4477 TYPE_MODE (TREE_TYPE (op1)), value,
4478 TYPE_UNSIGNED (TREE_TYPE (op1)));
4480 /* We may be accessing data outside the field, which means
4481 we can alias adjacent data. */
4482 if (MEM_P (str_rtx))
4484 str_rtx = shallow_copy_rtx (str_rtx);
4485 set_mem_alias_set (str_rtx, 0);
4486 set_mem_expr (str_rtx, 0);
4489 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4490 if (bitpos + bitsize != str_bitsize)
4492 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4493 str_mode);
4494 value = expand_and (str_mode, value, mask, NULL_RTX);
4496 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4497 if (reverse)
4498 value = flip_storage_order (str_mode, value);
4499 result = expand_binop (str_mode, binop, str_rtx,
4500 value, str_rtx, 1, OPTAB_WIDEN);
4501 if (result != str_rtx)
4502 emit_move_insn (str_rtx, result);
4503 return true;
4505 default:
4506 break;
4509 return false;
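/* A standalone C example of the source-level assignments the optimization
   above is aimed at; the struct layout is hypothetical and in any case
   target-dependent.  A xor can update the 1-bit field in place, and an
   add/sub on the topmost field of the word needs no masking at all.  */

struct sketch_flags
{
  unsigned ready : 1;     /* 1-bit field: can be flipped with xor */
  unsigned kind : 7;
  unsigned count : 24;    /* candidate "topmost" field: no masking needed */
};

static void
sketch_bitfield_ops (struct sketch_flags *s, unsigned n)
{
  s->ready ^= 1;          /* BIT_XOR_EXPR case above */
  s->count += n;          /* PLUS_EXPR case above, if COUNT is topmost */
}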
4512 /* In the C++ memory model, consecutive bit fields in a structure are
4513 considered one memory location.
4515 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4516 returns the bit range of consecutive bits in which this COMPONENT_REF
4517 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4518 and *OFFSET may be adjusted in the process.
4520 If the access does not need to be restricted, 0 is returned in both
4521 *BITSTART and *BITEND. */
4523 static void
4524 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4525 unsigned HOST_WIDE_INT *bitend,
4526 tree exp,
4527 HOST_WIDE_INT *bitpos,
4528 tree *offset)
4530 HOST_WIDE_INT bitoffset;
4531 tree field, repr;
4533 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4535 field = TREE_OPERAND (exp, 1);
4536 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4537 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4538 need to limit the range we can access. */
4539 if (!repr)
4541 *bitstart = *bitend = 0;
4542 return;
4545 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4546 part of a larger bit field, then the representative does not serve any
4547 useful purpose. This can occur in Ada. */
4548 if (handled_component_p (TREE_OPERAND (exp, 0)))
4550 machine_mode rmode;
4551 HOST_WIDE_INT rbitsize, rbitpos;
4552 tree roffset;
4553 int unsignedp, reversep, volatilep = 0;
4554 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4555 &roffset, &rmode, &unsignedp, &reversep,
4556 &volatilep, false);
4557 if ((rbitpos % BITS_PER_UNIT) != 0)
4559 *bitstart = *bitend = 0;
4560 return;
4564 /* Compute the adjustment to bitpos from the offset of the field
4565 relative to the representative. DECL_FIELD_OFFSET of field and
4566 repr are the same by construction if they are not constants,
4567 see finish_bitfield_layout. */
4568 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4569 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4570 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4571 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4572 else
4573 bitoffset = 0;
4574 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4575 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4577 /* If the adjustment is larger than bitpos, we would have a negative bit
4578 position for the lower bound and this may wreak havoc later. Adjust
4579 offset and bitpos to make the lower bound non-negative in that case. */
4580 if (bitoffset > *bitpos)
4582 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4583 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4585 *bitpos += adjust;
4586 if (*offset == NULL_TREE)
4587 *offset = size_int (-adjust / BITS_PER_UNIT);
4588 else
4589 *offset
4590 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4591 *bitstart = 0;
4593 else
4594 *bitstart = *bitpos - bitoffset;
4596 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
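/* A standalone example of the bit regions computed above.  Under the C++
   memory model the adjacent bit-fields A and B below share a single
   memory location (one DECL_BIT_FIELD_REPRESENTATIVE), so a store to A
   may use a read-modify-write that covers B, but it must not touch SEP
   or C, which belong to separate memory locations.  */

struct sketch_bit_region
{
  unsigned a : 3;     /* a and b form one bit region */
  unsigned b : 5;
  char sep;           /* a non-bit-field member ends the region */
  unsigned c : 4;     /* c starts a region of its own */
};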
4599 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4600 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4601 DECL_RTL was not set yet, return NORTL. */
4603 static inline bool
4604 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4606 if (TREE_CODE (addr) != ADDR_EXPR)
4607 return false;
4609 tree base = TREE_OPERAND (addr, 0);
4611 if (!DECL_P (base)
4612 || TREE_ADDRESSABLE (base)
4613 || DECL_MODE (base) == BLKmode)
4614 return false;
4616 if (!DECL_RTL_SET_P (base))
4617 return nortl;
4619 return (!MEM_P (DECL_RTL (base)));
4622 /* Returns true if the MEM_REF REF refers to an object that does not
4623 reside in memory and has non-BLKmode. */
4625 static inline bool
4626 mem_ref_refers_to_non_mem_p (tree ref)
4628 tree base = TREE_OPERAND (ref, 0);
4629 return addr_expr_of_non_mem_decl_p_1 (base, false);
4632 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4633 is true, try generating a nontemporal store. */
4635 void
4636 expand_assignment (tree to, tree from, bool nontemporal)
4638 rtx to_rtx = 0;
4639 rtx result;
4640 machine_mode mode;
4641 unsigned int align;
4642 enum insn_code icode;
4644 /* Don't crash if the lhs of the assignment was erroneous. */
4645 if (TREE_CODE (to) == ERROR_MARK)
4647 expand_normal (from);
4648 return;
4651 /* Optimize away no-op moves without side-effects. */
4652 if (operand_equal_p (to, from, 0))
4653 return;
4655 /* Handle misaligned stores. */
4656 mode = TYPE_MODE (TREE_TYPE (to));
4657 if ((TREE_CODE (to) == MEM_REF
4658 || TREE_CODE (to) == TARGET_MEM_REF)
4659 && mode != BLKmode
4660 && !mem_ref_refers_to_non_mem_p (to)
4661 && ((align = get_object_alignment (to))
4662 < GET_MODE_ALIGNMENT (mode))
4663 && (((icode = optab_handler (movmisalign_optab, mode))
4664 != CODE_FOR_nothing)
4665 || SLOW_UNALIGNED_ACCESS (mode, align)))
4667 rtx reg, mem;
4669 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4670 reg = force_not_mem (reg);
4671 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4672 if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
4673 reg = flip_storage_order (mode, reg);
4675 if (icode != CODE_FOR_nothing)
4677 struct expand_operand ops[2];
4679 create_fixed_operand (&ops[0], mem);
4680 create_input_operand (&ops[1], reg, mode);
4681 /* The movmisalign<mode> pattern cannot fail, else the assignment
4682 would silently be omitted. */
4683 expand_insn (icode, 2, ops);
4685 else
4686 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
4687 false);
4688 return;
4691 /* Assignment of a structure component needs special treatment
4692 if the structure component's rtx is not simply a MEM.
4693 Assignment of an array element at a constant index, and assignment of
4694 an array element in an unaligned packed structure field, has the same
4695 problem. Same for (partially) storing into a non-memory object. */
4696 if (handled_component_p (to)
4697 || (TREE_CODE (to) == MEM_REF
4698 && (REF_REVERSE_STORAGE_ORDER (to)
4699 || mem_ref_refers_to_non_mem_p (to)))
4700 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4702 machine_mode mode1;
4703 HOST_WIDE_INT bitsize, bitpos;
4704 unsigned HOST_WIDE_INT bitregion_start = 0;
4705 unsigned HOST_WIDE_INT bitregion_end = 0;
4706 tree offset;
4707 int unsignedp, reversep, volatilep = 0;
4708 tree tem;
4710 push_temp_slots ();
4711 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4712 &unsignedp, &reversep, &volatilep, true);
4714 /* Make sure bitpos is not negative, it can wreak havoc later. */
4715 if (bitpos < 0)
4717 gcc_assert (offset == NULL_TREE);
4718 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4719 ? 3 : exact_log2 (BITS_PER_UNIT)));
4720 bitpos &= BITS_PER_UNIT - 1;
4723 if (TREE_CODE (to) == COMPONENT_REF
4724 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4725 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4726 /* The C++ memory model naturally applies to byte-aligned fields.
4727 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4728 BITSIZE are not byte-aligned, there is no need to limit the range
4729 we can access. This can occur with packed structures in Ada. */
4730 else if (bitsize > 0
4731 && bitsize % BITS_PER_UNIT == 0
4732 && bitpos % BITS_PER_UNIT == 0)
4734 bitregion_start = bitpos;
4735 bitregion_end = bitpos + bitsize - 1;
4738 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4740 /* If the field has a mode, we want to access it in the
4741 field's mode, not the computed mode.
4742 If a MEM has VOIDmode (external with incomplete type),
4743 use BLKmode for it instead. */
4744 if (MEM_P (to_rtx))
4746 if (mode1 != VOIDmode)
4747 to_rtx = adjust_address (to_rtx, mode1, 0);
4748 else if (GET_MODE (to_rtx) == VOIDmode)
4749 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4752 if (offset != 0)
4754 machine_mode address_mode;
4755 rtx offset_rtx;
4757 if (!MEM_P (to_rtx))
4759 /* We can get constant negative offsets into arrays with broken
4760 user code. Translate this to a trap instead of ICEing. */
4761 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4762 expand_builtin_trap ();
4763 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4766 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4767 address_mode = get_address_mode (to_rtx);
4768 if (GET_MODE (offset_rtx) != address_mode)
4770 /* We cannot be sure that the RTL in offset_rtx is valid outside
4771 of a memory address context, so force it into a register
4772 before attempting to convert it to the desired mode. */
4773 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4774 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4777 /* If we have an expression in OFFSET_RTX and a non-zero
4778 byte offset in BITPOS, adding the byte offset before the
4779 OFFSET_RTX results in better intermediate code, which makes
4780 later rtl optimization passes perform better.
4782 We prefer intermediate code like this:
4784 r124:DI=r123:DI+0x18
4785 [r124:DI]=r121:DI
4787 ... instead of ...
4789 r124:DI=r123:DI+0x10
4790 [r124:DI+0x8]=r121:DI
4792 This is only done for aligned data values, as these can
4793 be expected to result in single move instructions. */
4794 if (mode1 != VOIDmode
4795 && bitpos != 0
4796 && bitsize > 0
4797 && (bitpos % bitsize) == 0
4798 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4799 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4801 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4802 bitregion_start = 0;
4803 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4804 bitregion_end -= bitpos;
4805 bitpos = 0;
4808 to_rtx = offset_address (to_rtx, offset_rtx,
4809 highest_pow2_factor_for_target (to,
4810 offset));
4813 /* No action is needed if the target is not a memory and the field
4814 lies completely outside that target. This can occur if the source
4815 code contains an out-of-bounds access to a small array. */
4816 if (!MEM_P (to_rtx)
4817 && GET_MODE (to_rtx) != BLKmode
4818 && (unsigned HOST_WIDE_INT) bitpos
4819 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4821 expand_normal (from);
4822 result = NULL;
4824 /* Handle expand_expr of a complex value returning a CONCAT. */
4825 else if (GET_CODE (to_rtx) == CONCAT)
4827 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4828 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4829 && bitpos == 0
4830 && bitsize == mode_bitsize)
4831 result = store_expr (from, to_rtx, false, nontemporal, reversep);
4832 else if (bitsize == mode_bitsize / 2
4833 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4834 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4835 nontemporal, reversep);
4836 else if (bitpos + bitsize <= mode_bitsize / 2)
4837 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4838 bitregion_start, bitregion_end,
4839 mode1, from, get_alias_set (to),
4840 nontemporal, reversep);
4841 else if (bitpos >= mode_bitsize / 2)
4842 result = store_field (XEXP (to_rtx, 1), bitsize,
4843 bitpos - mode_bitsize / 2,
4844 bitregion_start, bitregion_end,
4845 mode1, from, get_alias_set (to),
4846 nontemporal, reversep);
4847 else if (bitpos == 0 && bitsize == mode_bitsize)
4849 rtx from_rtx;
4850 result = expand_normal (from);
4851 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4852 TYPE_MODE (TREE_TYPE (from)), 0);
4853 emit_move_insn (XEXP (to_rtx, 0),
4854 read_complex_part (from_rtx, false));
4855 emit_move_insn (XEXP (to_rtx, 1),
4856 read_complex_part (from_rtx, true));
4858 else
4860 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4861 GET_MODE_SIZE (GET_MODE (to_rtx)));
4862 write_complex_part (temp, XEXP (to_rtx, 0), false);
4863 write_complex_part (temp, XEXP (to_rtx, 1), true);
4864 result = store_field (temp, bitsize, bitpos,
4865 bitregion_start, bitregion_end,
4866 mode1, from, get_alias_set (to),
4867 nontemporal, reversep);
4868 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4869 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4872 else
4874 if (MEM_P (to_rtx))
4876 /* If the field is at offset zero, we could have been given the
4877 DECL_RTX of the parent struct. Don't munge it. */
4878 to_rtx = shallow_copy_rtx (to_rtx);
4879 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4880 if (volatilep)
4881 MEM_VOLATILE_P (to_rtx) = 1;
4884 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4885 bitregion_start, bitregion_end,
4886 mode1, to_rtx, to, from,
4887 reversep))
4888 result = NULL;
4889 else
4890 result = store_field (to_rtx, bitsize, bitpos,
4891 bitregion_start, bitregion_end,
4892 mode1, from, get_alias_set (to),
4893 nontemporal, reversep);
4896 if (result)
4897 preserve_temp_slots (result);
4898 pop_temp_slots ();
4899 return;
4902 /* If the rhs is a function call and its value is not an aggregate,
4903 call the function before we start to compute the lhs.
4904 This is needed for correct code for cases such as
4905 val = setjmp (buf) on machines where reference to val
4906 requires loading up part of an address in a separate insn.
4908 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4909 since it might be a promoted variable where the zero- or sign- extension
4910 needs to be done. Handling this in the normal way is safe because no
4911 computation is done before the call. The same is true for SSA names. */
4912 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4913 && COMPLETE_TYPE_P (TREE_TYPE (from))
4914 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4915 && ! (((TREE_CODE (to) == VAR_DECL
4916 || TREE_CODE (to) == PARM_DECL
4917 || TREE_CODE (to) == RESULT_DECL)
4918 && REG_P (DECL_RTL (to)))
4919 || TREE_CODE (to) == SSA_NAME))
4921 rtx value;
4922 rtx bounds;
4924 push_temp_slots ();
4925 value = expand_normal (from);
4927 /* Split value and bounds to store them separately. */
4928 chkp_split_slot (value, &value, &bounds);
4930 if (to_rtx == 0)
4931 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4933 /* Handle calls that return values in multiple non-contiguous locations.
4934 The Irix 6 ABI has examples of this. */
4935 if (GET_CODE (to_rtx) == PARALLEL)
4937 if (GET_CODE (value) == PARALLEL)
4938 emit_group_move (to_rtx, value);
4939 else
4940 emit_group_load (to_rtx, value, TREE_TYPE (from),
4941 int_size_in_bytes (TREE_TYPE (from)));
4943 else if (GET_CODE (value) == PARALLEL)
4944 emit_group_store (to_rtx, value, TREE_TYPE (from),
4945 int_size_in_bytes (TREE_TYPE (from)));
4946 else if (GET_MODE (to_rtx) == BLKmode)
4948 /* Handle calls that return BLKmode values in registers. */
4949 if (REG_P (value))
4950 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
4951 else
4952 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4954 else
4956 if (POINTER_TYPE_P (TREE_TYPE (to)))
4957 value = convert_memory_address_addr_space
4958 (GET_MODE (to_rtx), value,
4959 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4961 emit_move_insn (to_rtx, value);
4964 /* Store bounds if required. */
4965 if (bounds
4966 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
4968 gcc_assert (MEM_P (to_rtx));
4969 chkp_emit_bounds_store (bounds, value, to_rtx);
4972 preserve_temp_slots (to_rtx);
4973 pop_temp_slots ();
4974 return;
4977 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
4978 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4980 /* Don't move directly into a return register. */
4981 if (TREE_CODE (to) == RESULT_DECL
4982 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4984 rtx temp;
4986 push_temp_slots ();
4988 /* If the source is itself a return value, it still is in a pseudo at
4989 this point so we can move it back to the return register directly. */
4990 if (REG_P (to_rtx)
4991 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
4992 && TREE_CODE (from) != CALL_EXPR)
4993 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4994 else
4995 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4997 /* Handle calls that return values in multiple non-contiguous locations.
4998 The Irix 6 ABI has examples of this. */
4999 if (GET_CODE (to_rtx) == PARALLEL)
5001 if (GET_CODE (temp) == PARALLEL)
5002 emit_group_move (to_rtx, temp);
5003 else
5004 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5005 int_size_in_bytes (TREE_TYPE (from)));
5007 else if (temp)
5008 emit_move_insn (to_rtx, temp);
5010 preserve_temp_slots (to_rtx);
5011 pop_temp_slots ();
5012 return;
5015 /* In case we are returning the contents of an object which overlaps
5016 the place the value is being stored, use a safe function when copying
5017 a value through a pointer into a structure value return block. */
5018 if (TREE_CODE (to) == RESULT_DECL
5019 && TREE_CODE (from) == INDIRECT_REF
5020 && ADDR_SPACE_GENERIC_P
5021 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5022 && refs_may_alias_p (to, from)
5023 && cfun->returns_struct
5024 && !cfun->returns_pcc_struct)
5026 rtx from_rtx, size;
5028 push_temp_slots ();
5029 size = expr_size (from);
5030 from_rtx = expand_normal (from);
5032 emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);
5034 preserve_temp_slots (to_rtx);
5035 pop_temp_slots ();
5036 return;
5039 /* Compute FROM and store the value in the rtx we got. */
5041 push_temp_slots ();
5042 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
5043 preserve_temp_slots (result);
5044 pop_temp_slots ();
5045 return;
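/* A standalone example of a store that can take the misaligned-store path
   near the top of expand_assignment.  On a strict-alignment target the
   VALUE field below is typically only byte-aligned, so the expander
   either uses a movmisalign<mode> pattern, if the target provides one, or
   falls back to store_bit_field.  (__attribute__((packed)) is a GNU
   extension; the sketch_* names are hypothetical.)  */

struct __attribute__ ((packed)) sketch_packed
{
  char tag;
  int value;
};

static void
sketch_misaligned_store (struct sketch_packed *p, int v)
{
  p->value = v;
}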
5048 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5049 succeeded, false otherwise. */
5051 bool
5052 emit_storent_insn (rtx to, rtx from)
5054 struct expand_operand ops[2];
5055 machine_mode mode = GET_MODE (to);
5056 enum insn_code code = optab_handler (storent_optab, mode);
5058 if (code == CODE_FOR_nothing)
5059 return false;
5061 create_fixed_operand (&ops[0], to);
5062 create_input_operand (&ops[1], from, mode);
5063 return maybe_expand_insn (code, 2, ops);
5066 /* Generate code for computing expression EXP,
5067 and storing the value into TARGET.
5069 If the mode is BLKmode then we may return TARGET itself.
5070 It turns out that in BLKmode it doesn't cause a problem,
5071 because C has no operators that could combine two different
5072 assignments into the same BLKmode object with different values
5073 with no sequence point. Will other languages need this to
5074 be more thorough?
5076 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5077 stack, and block moves may need to be treated specially.
5079 If NONTEMPORAL is true, try using a nontemporal store instruction.
5081 If REVERSE is true, the store is to be done in reverse order.
5083 If BTARGET is not NULL then computed bounds of EXP are
5084 associated with BTARGET. */
5087 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5088 bool nontemporal, bool reverse, tree btarget)
5090 rtx temp;
5091 rtx alt_rtl = NULL_RTX;
5092 location_t loc = curr_insn_location ();
5094 if (VOID_TYPE_P (TREE_TYPE (exp)))
5096 /* C++ can generate ?: expressions with a throw expression in one
5097 branch and an rvalue in the other. Here, we resolve attempts to
5098 store the throw expression's nonexistent result. */
5099 gcc_assert (!call_param_p);
5100 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5101 return NULL_RTX;
5103 if (TREE_CODE (exp) == COMPOUND_EXPR)
5105 /* Perform first part of compound expression, then assign from second
5106 part. */
5107 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5108 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5109 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5110 call_param_p, nontemporal, reverse,
5111 btarget);
5113 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5115 /* For conditional expression, get safe form of the target. Then
5116 test the condition, doing the appropriate assignment on either
5117 side. This avoids the creation of unnecessary temporaries.
5118 For non-BLKmode, it is more efficient not to do this. */
5120 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5122 do_pending_stack_adjust ();
5123 NO_DEFER_POP;
5124 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5125 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5126 nontemporal, reverse, btarget);
5127 emit_jump_insn (targetm.gen_jump (lab2));
5128 emit_barrier ();
5129 emit_label (lab1);
5130 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5131 nontemporal, reverse, btarget);
5132 emit_label (lab2);
5133 OK_DEFER_POP;
5135 return NULL_RTX;
5137 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5138 /* If this is a scalar in a register that is stored in a wider mode
5139 than the declared mode, compute the result into its declared mode
5140 and then convert to the wider mode. Our value is the computed
5141 expression. */
5143 rtx inner_target = 0;
5145 /* We can do the conversion inside EXP, which will often result
5146 in some optimizations. Do the conversion in two steps: first
5147 change the signedness, if needed, then the extend. But don't
5148 do this if the type of EXP is a subtype of something else
5149 since then the conversion might involve more than just
5150 converting modes. */
5151 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5152 && TREE_TYPE (TREE_TYPE (exp)) == 0
5153 && GET_MODE_PRECISION (GET_MODE (target))
5154 == TYPE_PRECISION (TREE_TYPE (exp)))
5156 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5157 TYPE_UNSIGNED (TREE_TYPE (exp))))
5159 /* Some types, e.g. Fortran's logical*4, won't have a signed
5160 version, so use the mode instead. */
5161 tree ntype
5162 = (signed_or_unsigned_type_for
5163 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5164 if (ntype == NULL)
5165 ntype = lang_hooks.types.type_for_mode
5166 (TYPE_MODE (TREE_TYPE (exp)),
5167 SUBREG_PROMOTED_SIGN (target));
5169 exp = fold_convert_loc (loc, ntype, exp);
5172 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5173 (GET_MODE (SUBREG_REG (target)),
5174 SUBREG_PROMOTED_SIGN (target)),
5175 exp);
5177 inner_target = SUBREG_REG (target);
5180 temp = expand_expr (exp, inner_target, VOIDmode,
5181 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5183 /* Handle bounds returned by call. */
5184 if (TREE_CODE (exp) == CALL_EXPR)
5186 rtx bounds;
5187 chkp_split_slot (temp, &temp, &bounds);
5188 if (bounds && btarget)
5190 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5191 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5192 chkp_set_rtl_bounds (btarget, tmp);
5196 /* If TEMP is a VOIDmode constant, use convert_modes to make
5197 sure that we properly convert it. */
5198 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5200 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5201 temp, SUBREG_PROMOTED_SIGN (target));
5202 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5203 GET_MODE (target), temp,
5204 SUBREG_PROMOTED_SIGN (target));
5207 convert_move (SUBREG_REG (target), temp,
5208 SUBREG_PROMOTED_SIGN (target));
5210 return NULL_RTX;
5212 else if ((TREE_CODE (exp) == STRING_CST
5213 || (TREE_CODE (exp) == MEM_REF
5214 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5215 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5216 == STRING_CST
5217 && integer_zerop (TREE_OPERAND (exp, 1))))
5218 && !nontemporal && !call_param_p
5219 && MEM_P (target))
5221 /* Optimize initialization of an array with a STRING_CST. */
5222 HOST_WIDE_INT exp_len, str_copy_len;
5223 rtx dest_mem;
5224 tree str = TREE_CODE (exp) == STRING_CST
5225 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5227 exp_len = int_expr_size (exp);
5228 if (exp_len <= 0)
5229 goto normal_expr;
5231 if (TREE_STRING_LENGTH (str) <= 0)
5232 goto normal_expr;
5234 str_copy_len = strlen (TREE_STRING_POINTER (str));
5235 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5236 goto normal_expr;
5238 str_copy_len = TREE_STRING_LENGTH (str);
5239 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5240 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5242 str_copy_len += STORE_MAX_PIECES - 1;
5243 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5245 str_copy_len = MIN (str_copy_len, exp_len);
5246 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5247 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5248 MEM_ALIGN (target), false))
5249 goto normal_expr;
5251 dest_mem = target;
5253 dest_mem = store_by_pieces (dest_mem,
5254 str_copy_len, builtin_strncpy_read_str,
5255 CONST_CAST (char *,
5256 TREE_STRING_POINTER (str)),
5257 MEM_ALIGN (target), false,
5258 exp_len > str_copy_len ? 1 : 0);
5259 if (exp_len > str_copy_len)
5260 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5261 GEN_INT (exp_len - str_copy_len),
5262 BLOCK_OP_NORMAL);
5263 return NULL_RTX;
5265 else
5267 rtx tmp_target;
5269 normal_expr:
5270 /* If we want to use a nontemporal or a reverse order store, force the
5271 value into a register first. */
5272 tmp_target = nontemporal || reverse ? NULL_RTX : target;
5273 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5274 (call_param_p
5275 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5276 &alt_rtl, false);
5278 /* Handle bounds returned by call. */
5279 if (TREE_CODE (exp) == CALL_EXPR)
5281 rtx bounds;
5282 chkp_split_slot (temp, &temp, &bounds);
5283 if (bounds && btarget)
5285 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5286 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5287 chkp_set_rtl_bounds (btarget, tmp);
5292 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5293 the same as that of TARGET, adjust the constant. This is needed, for
5294 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5295 only a word-sized value. */
5296 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5297 && TREE_CODE (exp) != ERROR_MARK
5298 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5299 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5300 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5302 /* If value was not generated in the target, store it there.
5303 Convert the value to TARGET's type first if necessary and emit the
5304 pending increments that have been queued when expanding EXP.
5305 Note that we cannot emit the whole queue blindly because this will
5306 effectively disable the POST_INC optimization later.
5308 If TEMP and TARGET compare equal according to rtx_equal_p, but
5309 one or both of them are volatile memory refs, we have to distinguish
5310 two cases:
5311 - expand_expr has used TARGET. In this case, we must not generate
5312 another copy. This can be detected by TARGET being equal according
5313 to == .
5314 - expand_expr has not used TARGET - that means that the source just
5315 happens to have the same RTX form. Since temp will have been created
5316 by expand_expr, it will compare unequal according to == .
5317 We must generate a copy in this case, to reach the correct number
5318 of volatile memory references. */
5320 if ((! rtx_equal_p (temp, target)
5321 || (temp != target && (side_effects_p (temp)
5322 || side_effects_p (target))))
5323 && TREE_CODE (exp) != ERROR_MARK
5324 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5325 but TARGET is not valid memory reference, TEMP will differ
5326 from TARGET although it is really the same location. */
5327 && !(alt_rtl
5328 && rtx_equal_p (alt_rtl, target)
5329 && !side_effects_p (alt_rtl)
5330 && !side_effects_p (target))
5331 /* If there's nothing to copy, don't bother. Don't call
5332 expr_size unless necessary, because the expr_size hook of some
5333 front ends (C++) must not be given objects that are not
5334 supposed to be bit-copied or bit-initialized. */
5335 && expr_size (exp) != const0_rtx)
5337 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5339 if (GET_MODE (target) == BLKmode)
5341 /* Handle calls that return BLKmode values in registers. */
5342 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5343 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5344 else
5345 store_bit_field (target,
5346 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5347 0, 0, 0, GET_MODE (temp), temp, reverse);
5349 else
5350 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5353 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5355 /* Handle copying a string constant into an array. The string
5356 constant may be shorter than the array. So copy just the string's
5357 actual length, and clear the rest. First get the size of the data
5358 type of the string, which is actually the size of the target. */
5359 rtx size = expr_size (exp);
5361 if (CONST_INT_P (size)
5362 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5363 emit_block_move (target, temp, size,
5364 (call_param_p
5365 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5366 else
5368 machine_mode pointer_mode
5369 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5370 machine_mode address_mode = get_address_mode (target);
5372 /* Compute the size of the data to copy from the string. */
5373 tree copy_size
5374 = size_binop_loc (loc, MIN_EXPR,
5375 make_tree (sizetype, size),
5376 size_int (TREE_STRING_LENGTH (exp)));
5377 rtx copy_size_rtx
5378 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5379 (call_param_p
5380 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5381 rtx_code_label *label = 0;
5383 /* Copy that much. */
5384 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5385 TYPE_UNSIGNED (sizetype));
5386 emit_block_move (target, temp, copy_size_rtx,
5387 (call_param_p
5388 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5390 /* Figure out how much is left in TARGET that we have to clear.
5391 Do all calculations in pointer_mode. */
5392 if (CONST_INT_P (copy_size_rtx))
5394 size = plus_constant (address_mode, size,
5395 -INTVAL (copy_size_rtx));
5396 target = adjust_address (target, BLKmode,
5397 INTVAL (copy_size_rtx));
5399 else
5401 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5402 copy_size_rtx, NULL_RTX, 0,
5403 OPTAB_LIB_WIDEN);
5405 if (GET_MODE (copy_size_rtx) != address_mode)
5406 copy_size_rtx = convert_to_mode (address_mode,
5407 copy_size_rtx,
5408 TYPE_UNSIGNED (sizetype));
5410 target = offset_address (target, copy_size_rtx,
5411 highest_pow2_factor (copy_size));
5412 label = gen_label_rtx ();
5413 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5414 GET_MODE (size), 0, label);
5417 if (size != const0_rtx)
5418 clear_storage (target, size, BLOCK_OP_NORMAL);
5420 if (label)
5421 emit_label (label);
5424 /* Handle calls that return values in multiple non-contiguous locations.
5425 The Irix 6 ABI has examples of this. */
5426 else if (GET_CODE (target) == PARALLEL)
5428 if (GET_CODE (temp) == PARALLEL)
5429 emit_group_move (target, temp);
5430 else
5431 emit_group_load (target, temp, TREE_TYPE (exp),
5432 int_size_in_bytes (TREE_TYPE (exp)));
5434 else if (GET_CODE (temp) == PARALLEL)
5435 emit_group_store (target, temp, TREE_TYPE (exp),
5436 int_size_in_bytes (TREE_TYPE (exp)));
5437 else if (GET_MODE (temp) == BLKmode)
5438 emit_block_move (target, temp, expr_size (exp),
5439 (call_param_p
5440 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5441 /* If we emit a nontemporal store, there is nothing else to do. */
5442 else if (nontemporal && emit_storent_insn (target, temp))
5444 else
5446 if (reverse)
5447 temp = flip_storage_order (GET_MODE (target), temp);
5448 temp = force_operand (temp, target);
5449 if (temp != target)
5450 emit_move_insn (target, temp);
5454 return NULL_RTX;
5457 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5459 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
5460 bool reverse)
5462 return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
5463 reverse, NULL);
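/* A standalone example of the STRING_CST case handled in
   store_expr_with_bounds above: the string constant is shorter than the
   array, so the expander copies the string data (by pieces when it can)
   and then clears the remaining bytes, matching C initialization
   semantics.  */

static void
sketch_string_init (void)
{
  char buf[16] = "abc";   /* 4 bytes copied, the other 12 cleared */
  (void) buf;
}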
5466 /* Return true if field F of structure TYPE is a flexible array. */
5468 static bool
5469 flexible_array_member_p (const_tree f, const_tree type)
5471 const_tree tf;
5473 tf = TREE_TYPE (f);
5474 return (DECL_CHAIN (f) == NULL
5475 && TREE_CODE (tf) == ARRAY_TYPE
5476 && TYPE_DOMAIN (tf)
5477 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5478 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5479 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5480 && int_size_in_bytes (type) >= 0);
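/* A standalone example of the layout flexible_array_member_p recognizes:
   the last field is an array with no upper bound, while the enclosing
   struct still has a known constant size.  Such a field is not something
   a constructor is expected to initialize.  */

struct sketch_flex
{
  int len;
  char data[];            /* flexible array member */
};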
5483 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5484 must have in order for it to completely initialize a value of type TYPE.
5485 Return -1 if the number isn't known.
5487 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5489 static HOST_WIDE_INT
5490 count_type_elements (const_tree type, bool for_ctor_p)
5492 switch (TREE_CODE (type))
5494 case ARRAY_TYPE:
5496 tree nelts;
5498 nelts = array_type_nelts (type);
5499 if (nelts && tree_fits_uhwi_p (nelts))
5501 unsigned HOST_WIDE_INT n;
5503 n = tree_to_uhwi (nelts) + 1;
5504 if (n == 0 || for_ctor_p)
5505 return n;
5506 else
5507 return n * count_type_elements (TREE_TYPE (type), false);
5509 return for_ctor_p ? -1 : 1;
5512 case RECORD_TYPE:
5514 unsigned HOST_WIDE_INT n;
5515 tree f;
5517 n = 0;
5518 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5519 if (TREE_CODE (f) == FIELD_DECL)
5521 if (!for_ctor_p)
5522 n += count_type_elements (TREE_TYPE (f), false);
5523 else if (!flexible_array_member_p (f, type))
5524 /* Don't count flexible arrays, which are not supposed
5525 to be initialized. */
5526 n += 1;
5529 return n;
5532 case UNION_TYPE:
5533 case QUAL_UNION_TYPE:
5535 tree f;
5536 HOST_WIDE_INT n, m;
5538 gcc_assert (!for_ctor_p);
5539 /* Estimate the number of scalars in each field and pick the
5540 maximum. Other estimates would do instead; the idea is simply
5541 to make sure that the estimate is not sensitive to the ordering
5542 of the fields. */
5543 n = 1;
5544 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5545 if (TREE_CODE (f) == FIELD_DECL)
5547 m = count_type_elements (TREE_TYPE (f), false);
5548 /* If the field doesn't span the whole union, add an extra
5549 scalar for the rest. */
5550 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5551 TYPE_SIZE (type)) != 1)
5552 m++;
5553 if (n < m)
5554 n = m;
5556 return n;
5559 case COMPLEX_TYPE:
5560 return 2;
5562 case VECTOR_TYPE:
5563 return TYPE_VECTOR_SUBPARTS (type);
5565 case INTEGER_TYPE:
5566 case REAL_TYPE:
5567 case FIXED_POINT_TYPE:
5568 case ENUMERAL_TYPE:
5569 case BOOLEAN_TYPE:
5570 case POINTER_TYPE:
5571 case OFFSET_TYPE:
5572 case REFERENCE_TYPE:
5573 case NULLPTR_TYPE:
5574 return 1;
5576 case ERROR_MARK:
5577 return 0;
5579 case VOID_TYPE:
5580 case METHOD_TYPE:
5581 case FUNCTION_TYPE:
5582 case LANG_TYPE:
5583 default:
5584 gcc_unreachable ();
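/* A small worked example for count_type_elements, assuming the
   hypothetical types

     struct point { int x; int y; };
     struct point path[8];

   For the array type of PATH, FOR_CTOR_P false counts 8 * 2 = 16
   scalars, while FOR_CTOR_P true counts 8, the number of top-level
   elements a complete constructor would have to supply.  */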
5588 /* Helper for categorize_ctor_elements. Identical interface. */
5590 static bool
5591 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5592 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5594 unsigned HOST_WIDE_INT idx;
5595 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5596 tree value, purpose, elt_type;
5598 /* Whether CTOR is a valid constant initializer, in accordance with what
5599 initializer_constant_valid_p does. If inferred from the constructor
5600 elements, true until proven otherwise. */
5601 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5602 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5604 nz_elts = 0;
5605 init_elts = 0;
5606 num_fields = 0;
5607 elt_type = NULL_TREE;
5609 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5611 HOST_WIDE_INT mult = 1;
5613 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5615 tree lo_index = TREE_OPERAND (purpose, 0);
5616 tree hi_index = TREE_OPERAND (purpose, 1);
5618 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5619 mult = (tree_to_uhwi (hi_index)
5620 - tree_to_uhwi (lo_index) + 1);
5622 num_fields += mult;
5623 elt_type = TREE_TYPE (value);
5625 switch (TREE_CODE (value))
5627 case CONSTRUCTOR:
5629 HOST_WIDE_INT nz = 0, ic = 0;
5631 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5632 p_complete);
5634 nz_elts += mult * nz;
5635 init_elts += mult * ic;
5637 if (const_from_elts_p && const_p)
5638 const_p = const_elt_p;
5640 break;
5642 case INTEGER_CST:
5643 case REAL_CST:
5644 case FIXED_CST:
5645 if (!initializer_zerop (value))
5646 nz_elts += mult;
5647 init_elts += mult;
5648 break;
5650 case STRING_CST:
5651 nz_elts += mult * TREE_STRING_LENGTH (value);
5652 init_elts += mult * TREE_STRING_LENGTH (value);
5653 break;
5655 case COMPLEX_CST:
5656 if (!initializer_zerop (TREE_REALPART (value)))
5657 nz_elts += mult;
5658 if (!initializer_zerop (TREE_IMAGPART (value)))
5659 nz_elts += mult;
5660 init_elts += mult;
5661 break;
5663 case VECTOR_CST:
5665 unsigned i;
5666 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5668 tree v = VECTOR_CST_ELT (value, i);
5669 if (!initializer_zerop (v))
5670 nz_elts += mult;
5671 init_elts += mult;
5674 break;
5676 default:
5678 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5679 nz_elts += mult * tc;
5680 init_elts += mult * tc;
5682 if (const_from_elts_p && const_p)
5683 const_p
5684 = initializer_constant_valid_p (value,
5685 elt_type,
5686 TYPE_REVERSE_STORAGE_ORDER
5687 (TREE_TYPE (ctor)))
5688 != NULL_TREE;
5690 break;
5694 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5695 num_fields, elt_type))
5696 *p_complete = false;
5698 *p_nz_elts += nz_elts;
5699 *p_init_elts += init_elts;
5701 return const_p;
5704 /* Examine CTOR to discover:
5705 * how many scalar fields are set to nonzero values,
5706 and place it in *P_NZ_ELTS;
5707 * how many scalar fields in total are in CTOR,
5708 and place it in *P_INIT_ELTS;
5709 * whether the constructor is complete -- in the sense that every
5710 meaningful byte is explicitly given a value --
5711 and place it in *P_COMPLETE.
5713 Return whether or not CTOR is a valid static constant initializer, the same
5714 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5716 bool
5717 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5718 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5720 *p_nz_elts = 0;
5721 *p_init_elts = 0;
5722 *p_complete = true;
5724 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
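/* A worked example for categorize_ctor_elements, assuming the front end
   records all four written elements of a hypothetical initializer

     int v[8] = { 1, 0, 0, 5 };

   Two of the four recorded elements are nonzero, so *P_NZ_ELTS is 2 and
   *P_INIT_ELTS is 4; *P_COMPLETE becomes false because only four of the
   eight elements are given a value, and the return value is true since
   every element is a valid constant.  */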
5727 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5728 of which had type LAST_TYPE. Each element was itself a complete
5729 initializer, in the sense that every meaningful byte was explicitly
5730 given a value. Return true if the same is true for the constructor
5731 as a whole. */
5733 bool
5734 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5735 const_tree last_type)
5737 if (TREE_CODE (type) == UNION_TYPE
5738 || TREE_CODE (type) == QUAL_UNION_TYPE)
5740 if (num_elts == 0)
5741 return false;
5743 gcc_assert (num_elts == 1 && last_type);
5745 /* ??? We could look at each element of the union, and find the
5746 largest element. Which would avoid comparing the size of the
5747 initialized element against any tail padding in the union.
5748 Doesn't seem worth the effort... */
5749 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5752 return count_type_elements (type, true) == num_elts;
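/* For a union, a single element whose type spans the whole union counts
   as complete.  Given a hypothetical type, assuming a 32-bit int,

     union u { int i; char c; };

   an initializer for I covers all four bytes and the function returns
   true, whereas an initializer for C covers only one byte and the size
   comparison fails.  */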
5755 /* Return 1 if EXP contains mostly (3/4) zeros. */
5757 static int
5758 mostly_zeros_p (const_tree exp)
5760 if (TREE_CODE (exp) == CONSTRUCTOR)
5762 HOST_WIDE_INT nz_elts, init_elts;
5763 bool complete_p;
5765 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5766 return !complete_p || nz_elts < init_elts / 4;
5769 return initializer_zerop (exp);
5772 /* Return 1 if EXP contains all zeros. */
5774 static int
5775 all_zeros_p (const_tree exp)
5777 if (TREE_CODE (exp) == CONSTRUCTOR)
5779 HOST_WIDE_INT nz_elts, init_elts;
5780 bool complete_p;
5782 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5783 return nz_elts == 0;
5786 return initializer_zerop (exp);
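/* The two predicates differ as follows, on hypothetical initializers
   for an array of 16 ints:

     { 0, 0, ..., 0 }  (16 zeros)   all_zeros_p true,  mostly_zeros_p true
     { [0] = 7 }                    all_zeros_p false, mostly_zeros_p true
     { 1, 2, ..., 16 }              all_zeros_p false, mostly_zeros_p false

   mostly_zeros_p also holds for any incomplete constructor, which
   store_constructor below uses as a hint to clear the whole object
   before storing the nonzero elements.  */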
5789 /* Helper function for store_constructor.
5790 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5791 CLEARED is as for store_constructor.
5792 ALIAS_SET is the alias set to use for any stores.
5793 If REVERSE is true, the store is to be done in reverse order.
5795 This provides a recursive shortcut back to store_constructor when it isn't
5796 necessary to go through store_field. This is so that we can pass through
5797 the cleared field to let store_constructor know that we may not have to
5798 clear a substructure if the outer structure has already been cleared. */
5800 static void
5801 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5802 HOST_WIDE_INT bitpos, machine_mode mode,
5803 tree exp, int cleared,
5804 alias_set_type alias_set, bool reverse)
5806 if (TREE_CODE (exp) == CONSTRUCTOR
5807 /* We can only call store_constructor recursively if the size and
5808 bit position are on a byte boundary. */
5809 && bitpos % BITS_PER_UNIT == 0
5810 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5811 /* If we have a nonzero bitpos for a register target, then we just
5812 let store_field do the bitfield handling. This is unlikely to
5813 generate unnecessary clear instructions anyways. */
5814 && (bitpos == 0 || MEM_P (target)))
5816 if (MEM_P (target))
5817 target
5818 = adjust_address (target,
5819 GET_MODE (target) == BLKmode
5820 || 0 != (bitpos
5821 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5822 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5825 /* Update the alias set, if required. */
5826 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5827 && MEM_ALIAS_SET (target) != 0)
5829 target = copy_rtx (target);
5830 set_mem_alias_set (target, alias_set);
5833 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT,
5834 reverse);
5836 else
5837 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false,
5838 reverse);
5842 /* Returns the number of FIELD_DECLs in TYPE. */
5844 static int
5845 fields_length (const_tree type)
5847 tree t = TYPE_FIELDS (type);
5848 int count = 0;
5850 for (; t; t = DECL_CHAIN (t))
5851 if (TREE_CODE (t) == FIELD_DECL)
5852 ++count;
5854 return count;
5858 /* Store the value of constructor EXP into the rtx TARGET.
5859 TARGET is either a REG or a MEM; we know it cannot conflict, since
5860 safe_from_p has been called.
5861 CLEARED is true if TARGET is known to have been zero'd.
5862 SIZE is the number of bytes of TARGET we are allowed to modify: this
5863 may not be the same as the size of EXP if we are assigning to a field
5864 which has been packed to exclude padding bits.
5865 If REVERSE is true, the store is to be done in reverse order. */
5867 static void
5868 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
5869 bool reverse)
5871 tree type = TREE_TYPE (exp);
5872 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5874 switch (TREE_CODE (type))
5876 case RECORD_TYPE:
5877 case UNION_TYPE:
5878 case QUAL_UNION_TYPE:
5880 unsigned HOST_WIDE_INT idx;
5881 tree field, value;
5883 /* The storage order is specified for every aggregate type. */
5884 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
5886 /* If size is zero or the target is already cleared, do nothing. */
5887 if (size == 0 || cleared)
5888 cleared = 1;
5889 /* We either clear the aggregate or indicate the value is dead. */
5890 else if ((TREE_CODE (type) == UNION_TYPE
5891 || TREE_CODE (type) == QUAL_UNION_TYPE)
5892 && ! CONSTRUCTOR_ELTS (exp))
5893 /* If the constructor is empty, clear the union. */
5895 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5896 cleared = 1;
5899 /* If we are building a static constructor into a register,
5900 set the initial value as zero so we can fold the value into
5901 a constant. But if more than one register is involved,
5902 this probably loses. */
5903 else if (REG_P (target) && TREE_STATIC (exp)
5904 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5906 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5907 cleared = 1;
5910 /* If the constructor has fewer fields than the structure or
5911 if we are initializing the structure to mostly zeros, clear
5912 the whole structure first. Don't do this if TARGET is a
5913 register whose mode size isn't equal to SIZE since
5914 clear_storage can't handle this case. */
5915 else if (size > 0
5916 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5917 != fields_length (type))
5918 || mostly_zeros_p (exp))
5919 && (!REG_P (target)
5920 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5921 == size)))
5923 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5924 cleared = 1;
5927 if (REG_P (target) && !cleared)
5928 emit_clobber (target);
5930 /* Store each element of the constructor into the
5931 corresponding field of TARGET. */
5932 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5934 machine_mode mode;
5935 HOST_WIDE_INT bitsize;
5936 HOST_WIDE_INT bitpos = 0;
5937 tree offset;
5938 rtx to_rtx = target;
5940 /* Just ignore missing fields. We cleared the whole
5941 structure, above, if any fields are missing. */
5942 if (field == 0)
5943 continue;
5945 if (cleared && initializer_zerop (value))
5946 continue;
5948 if (tree_fits_uhwi_p (DECL_SIZE (field)))
5949 bitsize = tree_to_uhwi (DECL_SIZE (field));
5950 else
5951 bitsize = -1;
5953 mode = DECL_MODE (field);
5954 if (DECL_BIT_FIELD (field))
5955 mode = VOIDmode;
5957 offset = DECL_FIELD_OFFSET (field);
5958 if (tree_fits_shwi_p (offset)
5959 && tree_fits_shwi_p (bit_position (field)))
5961 bitpos = int_bit_position (field);
5962 offset = 0;
5964 else
5965 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
5967 if (offset)
5969 machine_mode address_mode;
5970 rtx offset_rtx;
5972 offset
5973 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5974 make_tree (TREE_TYPE (exp),
5975 target));
5977 offset_rtx = expand_normal (offset);
5978 gcc_assert (MEM_P (to_rtx));
5980 address_mode = get_address_mode (to_rtx);
5981 if (GET_MODE (offset_rtx) != address_mode)
5982 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5984 to_rtx = offset_address (to_rtx, offset_rtx,
5985 highest_pow2_factor (offset));
5988 /* If this initializes a field that is smaller than a
5989 word, at the start of a word, try to widen it to a full
5990 word. This special case allows us to output C++ member
5991 function initializations in a form that the optimizers
5992 can understand. */
5993 if (WORD_REGISTER_OPERATIONS
5994 && REG_P (target)
5995 && bitsize < BITS_PER_WORD
5996 && bitpos % BITS_PER_WORD == 0
5997 && GET_MODE_CLASS (mode) == MODE_INT
5998 && TREE_CODE (value) == INTEGER_CST
5999 && exp_size >= 0
6000 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6002 tree type = TREE_TYPE (value);
6004 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6006 type = lang_hooks.types.type_for_mode
6007 (word_mode, TYPE_UNSIGNED (type));
6008 value = fold_convert (type, value);
6011 if (BYTES_BIG_ENDIAN)
6012 value
6013 = fold_build2 (LSHIFT_EXPR, type, value,
6014 build_int_cst (type,
6015 BITS_PER_WORD - bitsize));
6016 bitsize = BITS_PER_WORD;
6017 mode = word_mode;
6020 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6021 && DECL_NONADDRESSABLE_P (field))
6023 to_rtx = copy_rtx (to_rtx);
6024 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6027 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6028 value, cleared,
6029 get_alias_set (TREE_TYPE (field)),
6030 reverse);
6032 break;
6034 case ARRAY_TYPE:
6036 tree value, index;
6037 unsigned HOST_WIDE_INT i;
6038 int need_to_clear;
6039 tree domain;
6040 tree elttype = TREE_TYPE (type);
6041 int const_bounds_p;
6042 HOST_WIDE_INT minelt = 0;
6043 HOST_WIDE_INT maxelt = 0;
6045 /* The storage order is specified for every aggregate type. */
6046 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6048 domain = TYPE_DOMAIN (type);
6049 const_bounds_p = (TYPE_MIN_VALUE (domain)
6050 && TYPE_MAX_VALUE (domain)
6051 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6052 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6054 /* If we have constant bounds for the range of the type, get them. */
6055 if (const_bounds_p)
6057 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6058 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6061 /* If the constructor has fewer elements than the array, clear
6062 the whole array first. Similarly if this is a static
6063 constructor of a non-BLKmode object. */
6064 if (cleared)
6065 need_to_clear = 0;
6066 else if (REG_P (target) && TREE_STATIC (exp))
6067 need_to_clear = 1;
6068 else
6070 unsigned HOST_WIDE_INT idx;
6071 tree index, value;
6072 HOST_WIDE_INT count = 0, zero_count = 0;
6073 need_to_clear = ! const_bounds_p;
6075 /* This loop is a more accurate version of the loop in
6076 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6077 is also needed to check for missing elements. */
6078 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6080 HOST_WIDE_INT this_node_count;
6082 if (need_to_clear)
6083 break;
6085 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6087 tree lo_index = TREE_OPERAND (index, 0);
6088 tree hi_index = TREE_OPERAND (index, 1);
6090 if (! tree_fits_uhwi_p (lo_index)
6091 || ! tree_fits_uhwi_p (hi_index))
6093 need_to_clear = 1;
6094 break;
6097 this_node_count = (tree_to_uhwi (hi_index)
6098 - tree_to_uhwi (lo_index) + 1);
6100 else
6101 this_node_count = 1;
6103 count += this_node_count;
6104 if (mostly_zeros_p (value))
6105 zero_count += this_node_count;
6108 /* Clear the entire array first if there are any missing
6109 elements, or if the incidence of zero elements is >=
6110 75%. */
6111 if (! need_to_clear
6112 && (count < maxelt - minelt + 1
6113 || 4 * zero_count >= 3 * count))
6114 need_to_clear = 1;
6117 if (need_to_clear && size > 0)
6119 if (REG_P (target))
6120 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6121 else
6122 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6123 cleared = 1;
6126 if (!cleared && REG_P (target))
6127 /* Inform later passes that the old value is dead. */
6128 emit_clobber (target);
6130 /* Store each element of the constructor into the
6131 corresponding element of TARGET, determined by counting the
6132 elements. */
6133 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6135 machine_mode mode;
6136 HOST_WIDE_INT bitsize;
6137 HOST_WIDE_INT bitpos;
6138 rtx xtarget = target;
6140 if (cleared && initializer_zerop (value))
6141 continue;
6143 mode = TYPE_MODE (elttype);
6144 if (mode == BLKmode)
6145 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6146 ? tree_to_uhwi (TYPE_SIZE (elttype))
6147 : -1);
6148 else
6149 bitsize = GET_MODE_BITSIZE (mode);
6151 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6153 tree lo_index = TREE_OPERAND (index, 0);
6154 tree hi_index = TREE_OPERAND (index, 1);
6155 rtx index_r, pos_rtx;
6156 HOST_WIDE_INT lo, hi, count;
6157 tree position;
6159 /* If the range is constant and "small", unroll the loop. */
6160 if (const_bounds_p
6161 && tree_fits_shwi_p (lo_index)
6162 && tree_fits_shwi_p (hi_index)
6163 && (lo = tree_to_shwi (lo_index),
6164 hi = tree_to_shwi (hi_index),
6165 count = hi - lo + 1,
6166 (!MEM_P (target)
6167 || count <= 2
6168 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6169 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6170 <= 40 * 8)))))
6172 lo -= minelt; hi -= minelt;
6173 for (; lo <= hi; lo++)
6175 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6177 if (MEM_P (target)
6178 && !MEM_KEEP_ALIAS_SET_P (target)
6179 && TREE_CODE (type) == ARRAY_TYPE
6180 && TYPE_NONALIASED_COMPONENT (type))
6182 target = copy_rtx (target);
6183 MEM_KEEP_ALIAS_SET_P (target) = 1;
6186 store_constructor_field
6187 (target, bitsize, bitpos, mode, value, cleared,
6188 get_alias_set (elttype), reverse);
6191 else
6193 rtx_code_label *loop_start = gen_label_rtx ();
6194 rtx_code_label *loop_end = gen_label_rtx ();
6195 tree exit_cond;
6197 expand_normal (hi_index);
6199 index = build_decl (EXPR_LOCATION (exp),
6200 VAR_DECL, NULL_TREE, domain);
6201 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6202 SET_DECL_RTL (index, index_r);
6203 store_expr (lo_index, index_r, 0, false, reverse);
6205 /* Build the head of the loop. */
6206 do_pending_stack_adjust ();
6207 emit_label (loop_start);
6209 /* Assign value to element index. */
6210 position =
6211 fold_convert (ssizetype,
6212 fold_build2 (MINUS_EXPR,
6213 TREE_TYPE (index),
6214 index,
6215 TYPE_MIN_VALUE (domain)));
6217 position =
6218 size_binop (MULT_EXPR, position,
6219 fold_convert (ssizetype,
6220 TYPE_SIZE_UNIT (elttype)));
6222 pos_rtx = expand_normal (position);
6223 xtarget = offset_address (target, pos_rtx,
6224 highest_pow2_factor (position));
6225 xtarget = adjust_address (xtarget, mode, 0);
6226 if (TREE_CODE (value) == CONSTRUCTOR)
6227 store_constructor (value, xtarget, cleared,
6228 bitsize / BITS_PER_UNIT, reverse);
6229 else
6230 store_expr (value, xtarget, 0, false, reverse);
6232 /* Generate a conditional jump to exit the loop. */
6233 exit_cond = build2 (LT_EXPR, integer_type_node,
6234 index, hi_index);
6235 jumpif (exit_cond, loop_end, -1);
6237 /* Update the loop counter, and jump to the head of
6238 the loop. */
6239 expand_assignment (index,
6240 build2 (PLUS_EXPR, TREE_TYPE (index),
6241 index, integer_one_node),
6242 false);
6244 emit_jump (loop_start);
6246 /* Build the end of the loop. */
6247 emit_label (loop_end);
6250 else if ((index != 0 && ! tree_fits_shwi_p (index))
6251 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6253 tree position;
6255 if (index == 0)
6256 index = ssize_int (1);
6258 if (minelt)
6259 index = fold_convert (ssizetype,
6260 fold_build2 (MINUS_EXPR,
6261 TREE_TYPE (index),
6262 index,
6263 TYPE_MIN_VALUE (domain)));
6265 position =
6266 size_binop (MULT_EXPR, index,
6267 fold_convert (ssizetype,
6268 TYPE_SIZE_UNIT (elttype)));
6269 xtarget = offset_address (target,
6270 expand_normal (position),
6271 highest_pow2_factor (position));
6272 xtarget = adjust_address (xtarget, mode, 0);
6273 store_expr (value, xtarget, 0, false, reverse);
6275 else
6277 if (index != 0)
6278 bitpos = ((tree_to_shwi (index) - minelt)
6279 * tree_to_uhwi (TYPE_SIZE (elttype)));
6280 else
6281 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6283 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6284 && TREE_CODE (type) == ARRAY_TYPE
6285 && TYPE_NONALIASED_COMPONENT (type))
6287 target = copy_rtx (target);
6288 MEM_KEEP_ALIAS_SET_P (target) = 1;
6290 store_constructor_field (target, bitsize, bitpos, mode, value,
6291 cleared, get_alias_set (elttype),
6292 reverse);
6295 break;
6298 case VECTOR_TYPE:
6300 unsigned HOST_WIDE_INT idx;
6301 constructor_elt *ce;
6302 int i;
6303 int need_to_clear;
6304 int icode = CODE_FOR_nothing;
6305 tree elttype = TREE_TYPE (type);
6306 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6307 machine_mode eltmode = TYPE_MODE (elttype);
6308 HOST_WIDE_INT bitsize;
6309 HOST_WIDE_INT bitpos;
6310 rtvec vector = NULL;
6311 unsigned n_elts;
6312 alias_set_type alias;
6314 gcc_assert (eltmode != BLKmode);
6316 n_elts = TYPE_VECTOR_SUBPARTS (type);
6317 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6319 machine_mode mode = GET_MODE (target);
6321 icode = (int) optab_handler (vec_init_optab, mode);
6322 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6323 if (icode != CODE_FOR_nothing)
6325 tree value;
6327 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6328 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6330 icode = CODE_FOR_nothing;
6331 break;
6334 if (icode != CODE_FOR_nothing)
6336 unsigned int i;
6338 vector = rtvec_alloc (n_elts);
6339 for (i = 0; i < n_elts; i++)
6340 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6344 /* If the constructor has fewer elements than the vector,
6345 clear the whole vector first. Similarly if this is a static
6346 constructor of a non-BLKmode object. */
6347 if (cleared)
6348 need_to_clear = 0;
6349 else if (REG_P (target) && TREE_STATIC (exp))
6350 need_to_clear = 1;
6351 else
6353 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6354 tree value;
6356 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6358 int n_elts_here = tree_to_uhwi
6359 (int_const_binop (TRUNC_DIV_EXPR,
6360 TYPE_SIZE (TREE_TYPE (value)),
6361 TYPE_SIZE (elttype)));
6363 count += n_elts_here;
6364 if (mostly_zeros_p (value))
6365 zero_count += n_elts_here;
6368 /* Clear the entire vector first if there are any missing elements,
6369 or if the incidence of zero elements is >= 75%. */
6370 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6373 if (need_to_clear && size > 0 && !vector)
6375 if (REG_P (target))
6376 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6377 else
6378 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6379 cleared = 1;
6382 /* Inform later passes that the old value is dead. */
6383 if (!cleared && !vector && REG_P (target))
6384 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6386 if (MEM_P (target))
6387 alias = MEM_ALIAS_SET (target);
6388 else
6389 alias = get_alias_set (elttype);
6391 /* Store each element of the constructor into the corresponding
6392 element of TARGET, determined by counting the elements. */
6393 for (idx = 0, i = 0;
6394 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6395 idx++, i += bitsize / elt_size)
6397 HOST_WIDE_INT eltpos;
6398 tree value = ce->value;
6400 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6401 if (cleared && initializer_zerop (value))
6402 continue;
6404 if (ce->index)
6405 eltpos = tree_to_uhwi (ce->index);
6406 else
6407 eltpos = i;
6409 if (vector)
6411 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6412 elements. */
6413 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6414 RTVEC_ELT (vector, eltpos)
6415 = expand_normal (value);
6417 else
6419 machine_mode value_mode =
6420 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6421 ? TYPE_MODE (TREE_TYPE (value))
6422 : eltmode;
6423 bitpos = eltpos * elt_size;
6424 store_constructor_field (target, bitsize, bitpos, value_mode,
6425 value, cleared, alias, reverse);
6429 if (vector)
6430 emit_insn (GEN_FCN (icode)
6431 (target,
6432 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6433 break;
6436 default:
6437 gcc_unreachable ();
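/* A sketch of the clear-first strategy in store_constructor, assuming
   the whole CONSTRUCTOR of a hypothetical aggregate reaches the
   expander:

     struct big { int a[32]; int flag; } x = { .flag = 1 };

   The constructor supplies one of the two top-level fields and is
   mostly zeros, so the RECORD_TYPE case first calls clear_storage on
   all of X and then stores only the nonzero FLAG field, rather than
   storing every element individually.  */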
6441 /* Store the value of EXP (an expression tree)
6442 into a subfield of TARGET which has mode MODE and occupies
6443 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6444 If MODE is VOIDmode, it means that we are storing into a bit-field.
6446 BITREGION_START is bitpos of the first bitfield in this region.
6447 BITREGION_END is the bitpos of the ending bitfield in this region.
6448 These two fields are 0, if the C++ memory model does not apply,
6449 or we are not interested in keeping track of bitfield regions.
6451 Always return const0_rtx unless we have something particular to
6452 return.
6454 ALIAS_SET is the alias set for the destination. This value will
6455 (in general) be different from that for TARGET, since TARGET is a
6456 reference to the containing structure.
6458 If NONTEMPORAL is true, try generating a nontemporal store.
6460 If REVERSE is true, the store is to be done in reverse order. */
6462 static rtx
6463 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6464 unsigned HOST_WIDE_INT bitregion_start,
6465 unsigned HOST_WIDE_INT bitregion_end,
6466 machine_mode mode, tree exp,
6467 alias_set_type alias_set, bool nontemporal, bool reverse)
6469 if (TREE_CODE (exp) == ERROR_MARK)
6470 return const0_rtx;
6472 /* If we have nothing to store, do nothing unless the expression has
6473 side-effects. */
6474 if (bitsize == 0)
6475 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6477 if (GET_CODE (target) == CONCAT)
6479 /* We're storing into a struct containing a single __complex. */
6481 gcc_assert (!bitpos);
6482 return store_expr (exp, target, 0, nontemporal, reverse);
6485 /* If the structure is in a register or if the component
6486 is a bit field, we cannot use addressing to access it.
6487 Use bit-field techniques or SUBREG to store in it. */
6489 if (mode == VOIDmode
6490 || (mode != BLKmode && ! direct_store[(int) mode]
6491 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6492 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6493 || REG_P (target)
6494 || GET_CODE (target) == SUBREG
6495 /* If the field isn't aligned enough to store as an ordinary memref,
6496 store it as a bit field. */
6497 || (mode != BLKmode
6498 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6499 || bitpos % GET_MODE_ALIGNMENT (mode))
6500 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6501 || (bitpos % BITS_PER_UNIT != 0)))
6502 || (bitsize >= 0 && mode != BLKmode
6503 && GET_MODE_BITSIZE (mode) > bitsize)
6504 /* If the RHS and field are a constant size and the size of the
6505 RHS isn't the same size as the bitfield, we must use bitfield
6506 operations. */
6507 || (bitsize >= 0
6508 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6509 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0
6510 /* Except for initialization of full bytes from a CONSTRUCTOR, which
6511 we will handle specially below. */
6512 && !(TREE_CODE (exp) == CONSTRUCTOR
6513 && bitsize % BITS_PER_UNIT == 0)
6514 /* And except for bitwise copying of TREE_ADDRESSABLE types,
6515 where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
6516 includes some extra padding. store_expr / expand_expr will in
6517 that case call get_inner_reference that will have the bitsize
6518 we check here and thus the block move will not clobber the
6519 padding that shouldn't be clobbered. In the future we could
6520 replace the TREE_ADDRESSABLE check with a check that
6521 get_base_address needs to live in memory. */
6522 && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6523 || TREE_CODE (exp) != COMPONENT_REF
6524 || TREE_CODE (DECL_SIZE (TREE_OPERAND (exp, 1))) != INTEGER_CST
6525 || (bitsize % BITS_PER_UNIT != 0)
6526 || (bitpos % BITS_PER_UNIT != 0)
6527 || (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)), bitsize)
6528 != 0)))
6529 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6530 decl we must use bitfield operations. */
6531 || (bitsize >= 0
6532 && TREE_CODE (exp) == MEM_REF
6533 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6534 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6535 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6536 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6538 rtx temp;
6539 gimple *nop_def;
6541 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6542 implies a mask operation. If the precision is the same size as
6543 the field we're storing into, that mask is redundant. This is
6544 particularly common with bit field assignments generated by the
6545 C front end. */
6546 nop_def = get_def_for_expr (exp, NOP_EXPR);
6547 if (nop_def)
6549 tree type = TREE_TYPE (exp);
6550 if (INTEGRAL_TYPE_P (type)
6551 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6552 && bitsize == TYPE_PRECISION (type))
6554 tree op = gimple_assign_rhs1 (nop_def);
6555 type = TREE_TYPE (op);
6556 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6557 exp = op;
6561 temp = expand_normal (exp);
6563 /* If the value has a record type and an integral mode then, if BITSIZE
6564 is narrower than this mode and this is for big-endian data, we must
6565 first put the value into the low-order bits. Moreover, the field may
6566 be not aligned on a byte boundary; in this case, if it has reverse
6567 storage order, it needs to be accessed as a scalar field with reverse
6568 storage order and we must first put the value into target order. */
6569 if (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
6570 && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT)
6572 HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (temp));
6574 reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
6576 if (reverse)
6577 temp = flip_storage_order (GET_MODE (temp), temp);
6579 if (bitsize < size
6580 && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6581 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6582 size - bitsize, NULL_RTX, 1);
6585 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6586 if (mode != VOIDmode && mode != BLKmode
6587 && mode != TYPE_MODE (TREE_TYPE (exp)))
6588 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6590 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6591 are both BLKmode, both must be in memory and BITPOS must be aligned
6592 on a byte boundary. If so, we simply do a block copy. Likewise for
6593 a BLKmode-like TARGET. */
6594 if (GET_CODE (temp) != PARALLEL
6595 && GET_MODE (temp) == BLKmode
6596 && (GET_MODE (target) == BLKmode
6597 || (MEM_P (target)
6598 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6599 && (bitpos % BITS_PER_UNIT) == 0
6600 && (bitsize % BITS_PER_UNIT) == 0)))
6602 gcc_assert (MEM_P (target) && MEM_P (temp)
6603 && (bitpos % BITS_PER_UNIT) == 0);
6605 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6606 emit_block_move (target, temp,
6607 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6608 / BITS_PER_UNIT),
6609 BLOCK_OP_NORMAL);
6611 return const0_rtx;
6614 /* Handle calls that return values in multiple non-contiguous locations.
6615 The Irix 6 ABI has examples of this. */
6616 if (GET_CODE (temp) == PARALLEL)
6618 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6619 rtx temp_target;
6620 if (mode == BLKmode || mode == VOIDmode)
6621 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6622 temp_target = gen_reg_rtx (mode);
6623 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6624 temp = temp_target;
6626 else if (mode == BLKmode)
6628 /* Handle calls that return BLKmode values in registers. */
6629 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6631 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6632 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6633 temp = temp_target;
6635 else
6637 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6638 rtx temp_target;
6639 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6640 temp_target = gen_reg_rtx (mode);
6641 temp_target
6642 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6643 temp_target, mode, mode, false);
6644 temp = temp_target;
6648 /* Store the value in the bitfield. */
6649 store_bit_field (target, bitsize, bitpos,
6650 bitregion_start, bitregion_end,
6651 mode, temp, reverse);
6653 return const0_rtx;
6655 else
6657 /* Now build a reference to just the desired component. */
6658 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6660 if (to_rtx == target)
6661 to_rtx = copy_rtx (to_rtx);
6663 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6664 set_mem_alias_set (to_rtx, alias_set);
6666 /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
6667 into a target smaller than its type; handle that case now. */
6668 if (TREE_CODE (exp) == CONSTRUCTOR && bitsize >= 0)
6670 gcc_assert (bitsize % BITS_PER_UNIT == 0);
6671 store_constructor (exp, to_rtx, 0, bitsize / BITS_PER_UNIT, reverse);
6672 return to_rtx;
6675 return store_expr (exp, to_rtx, 0, nontemporal, reverse);
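/* A minimal example of how the bit-field branch of store_field can be
   reached, for a hypothetical type

     struct flags { unsigned a : 3; unsigned b : 5; } f;

   A store to F.B arrives with MODE == VOIDmode, BITSIZE == 5 and
   BITPOS == 3 under the usual layout, so the right-hand side is
   expanded and written with store_bit_field instead of through an
   ordinary memory reference.  */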
6679 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6680 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6681 codes and find the ultimate containing object, which we return.
6683 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6684 bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
6685 storage order of the field.
6686 If the position of the field is variable, we store a tree
6687 giving the variable offset (in units) in *POFFSET.
6688 This offset is in addition to the bit position.
6689 If the position is not variable, we store 0 in *POFFSET.
6691 If any of the extraction expressions is volatile,
6692 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6694 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6695 Otherwise, it is a mode that can be used to access the field.
6697 If the field describes a variable-sized object, *PMODE is set to
6698 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6699 this case, but the address of the object can be found.
6701 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6702 look through nodes that serve as markers of a greater alignment than
6703 the one that can be deduced from the expression. These nodes make it
6704 possible for front-ends to prevent temporaries from being created by
6705 the middle-end on alignment considerations. For that purpose, the
6706 normal operating mode at high-level is to always pass FALSE so that
6707 the ultimate containing object is really returned; moreover, the
6708 associated predicate handled_component_p will always return TRUE
6709 on these nodes, thus indicating that they are essentially handled
6710 by get_inner_reference. TRUE should only be passed when the caller
6711 is scanning the expression in order to build another representation
6712 and specifically knows how to handle these nodes; as such, this is
6713 the normal operating mode in the RTL expanders. */
6715 tree
6716 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6717 HOST_WIDE_INT *pbitpos, tree *poffset,
6718 machine_mode *pmode, int *punsignedp,
6719 int *preversep, int *pvolatilep, bool keep_aligning)
6721 tree size_tree = 0;
6722 machine_mode mode = VOIDmode;
6723 bool blkmode_bitfield = false;
6724 tree offset = size_zero_node;
6725 offset_int bit_offset = 0;
6727 /* First get the mode, signedness, storage order and size. We do this from
6728 just the outermost expression. */
6729 *pbitsize = -1;
6730 if (TREE_CODE (exp) == COMPONENT_REF)
6732 tree field = TREE_OPERAND (exp, 1);
6733 size_tree = DECL_SIZE (field);
6734 if (flag_strict_volatile_bitfields > 0
6735 && TREE_THIS_VOLATILE (exp)
6736 && DECL_BIT_FIELD_TYPE (field)
6737 && DECL_MODE (field) != BLKmode)
6738 /* Volatile bitfields should be accessed in the mode of the
6739 field's type, not the mode computed based on the bit
6740 size. */
6741 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6742 else if (!DECL_BIT_FIELD (field))
6743 mode = DECL_MODE (field);
6744 else if (DECL_MODE (field) == BLKmode)
6745 blkmode_bitfield = true;
6747 *punsignedp = DECL_UNSIGNED (field);
6749 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6751 size_tree = TREE_OPERAND (exp, 1);
6752 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6753 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6755 /* For vector types, with the correct size of access, use the mode of
6756 inner type. */
6757 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6758 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6759 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6760 mode = TYPE_MODE (TREE_TYPE (exp));
6762 else
6764 mode = TYPE_MODE (TREE_TYPE (exp));
6765 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6767 if (mode == BLKmode)
6768 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6769 else
6770 *pbitsize = GET_MODE_BITSIZE (mode);
6773 if (size_tree != 0)
6775 if (! tree_fits_uhwi_p (size_tree))
6776 mode = BLKmode, *pbitsize = -1;
6777 else
6778 *pbitsize = tree_to_uhwi (size_tree);
6781 *preversep = reverse_storage_order_for_component_p (exp);
6783 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6784 and find the ultimate containing object. */
6785 while (1)
6787 switch (TREE_CODE (exp))
6789 case BIT_FIELD_REF:
6790 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6791 break;
6793 case COMPONENT_REF:
6795 tree field = TREE_OPERAND (exp, 1);
6796 tree this_offset = component_ref_field_offset (exp);
6798 /* If this field hasn't been filled in yet, don't go past it.
6799 This should only happen when folding expressions made during
6800 type construction. */
6801 if (this_offset == 0)
6802 break;
6804 offset = size_binop (PLUS_EXPR, offset, this_offset);
6805 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6807 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6809 break;
6811 case ARRAY_REF:
6812 case ARRAY_RANGE_REF:
6814 tree index = TREE_OPERAND (exp, 1);
6815 tree low_bound = array_ref_low_bound (exp);
6816 tree unit_size = array_ref_element_size (exp);
6818 /* We assume all arrays have sizes that are a multiple of a byte.
6819 First subtract the lower bound, if any, in the type of the
6820 index, then convert to sizetype and multiply by the size of
6821 the array element. */
6822 if (! integer_zerop (low_bound))
6823 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6824 index, low_bound);
6826 offset = size_binop (PLUS_EXPR, offset,
6827 size_binop (MULT_EXPR,
6828 fold_convert (sizetype, index),
6829 unit_size));
6831 break;
6833 case REALPART_EXPR:
6834 break;
6836 case IMAGPART_EXPR:
6837 bit_offset += *pbitsize;
6838 break;
6840 case VIEW_CONVERT_EXPR:
6841 if (keep_aligning && STRICT_ALIGNMENT
6842 && (TYPE_ALIGN (TREE_TYPE (exp))
6843 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6844 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6845 < BIGGEST_ALIGNMENT)
6846 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6847 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6848 goto done;
6849 break;
6851 case MEM_REF:
6852 /* Hand back the decl for MEM[&decl, off]. */
6853 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6855 tree off = TREE_OPERAND (exp, 1);
6856 if (!integer_zerop (off))
6858 offset_int boff, coff = mem_ref_offset (exp);
6859 boff = coff << LOG2_BITS_PER_UNIT;
6860 bit_offset += boff;
6862 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6864 goto done;
6866 default:
6867 goto done;
6870 /* If any reference in the chain is volatile, the effect is volatile. */
6871 if (TREE_THIS_VOLATILE (exp))
6872 *pvolatilep = 1;
6874 exp = TREE_OPERAND (exp, 0);
6876 done:
6878 /* If OFFSET is constant, see if we can return the whole thing as a
6879 constant bit position. Make sure to handle overflow during
6880 this conversion. */
6881 if (TREE_CODE (offset) == INTEGER_CST)
6883 offset_int tem = wi::sext (wi::to_offset (offset),
6884 TYPE_PRECISION (sizetype));
6885 tem <<= LOG2_BITS_PER_UNIT;
6886 tem += bit_offset;
6887 if (wi::fits_shwi_p (tem))
6889 *pbitpos = tem.to_shwi ();
6890 *poffset = offset = NULL_TREE;
6894 /* Otherwise, split it up. */
6895 if (offset)
6897 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6898 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
6900 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6901 offset_int tem = bit_offset.and_not (mask);
6902 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6903 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6904 bit_offset -= tem;
6905 tem >>= LOG2_BITS_PER_UNIT;
6906 offset = size_binop (PLUS_EXPR, offset,
6907 wide_int_to_tree (sizetype, tem));
6910 *pbitpos = bit_offset.to_shwi ();
6911 *poffset = offset;
6914 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6915 if (mode == VOIDmode
6916 && blkmode_bitfield
6917 && (*pbitpos % BITS_PER_UNIT) == 0
6918 && (*pbitsize % BITS_PER_UNIT) == 0)
6919 *pmode = BLKmode;
6920 else
6921 *pmode = mode;
6923 return exp;
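/* A worked example for get_inner_reference, on a hypothetical access
   with a 32-bit int:

     struct pair { int lo; int hi; } p;

   For the COMPONENT_REF P.HI the function returns the decl P with
   *PBITSIZE == 32, *PBITPOS == 32, *POFFSET == NULL_TREE and *PMODE
   the mode of int; a variable array index such as A[I] instead comes
   back with *PBITPOS == 0 and *POFFSET set to a tree for the scaled
   index.  */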
6926 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6928 static unsigned HOST_WIDE_INT
6929 target_align (const_tree target)
6931 /* We might have a chain of nested references with intermediate misaligning
6932 bit-field components, so we need to recurse to find out. */
6934 unsigned HOST_WIDE_INT this_align, outer_align;
6936 switch (TREE_CODE (target))
6938 case BIT_FIELD_REF:
6939 return 1;
6941 case COMPONENT_REF:
6942 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6943 outer_align = target_align (TREE_OPERAND (target, 0));
6944 return MIN (this_align, outer_align);
6946 case ARRAY_REF:
6947 case ARRAY_RANGE_REF:
6948 this_align = TYPE_ALIGN (TREE_TYPE (target));
6949 outer_align = target_align (TREE_OPERAND (target, 0));
6950 return MIN (this_align, outer_align);
6952 CASE_CONVERT:
6953 case NON_LVALUE_EXPR:
6954 case VIEW_CONVERT_EXPR:
6955 this_align = TYPE_ALIGN (TREE_TYPE (target));
6956 outer_align = target_align (TREE_OPERAND (target, 0));
6957 return MAX (this_align, outer_align);
6959 default:
6960 return TYPE_ALIGN (TREE_TYPE (target));
6965 /* Given an rtx VALUE that may contain additions and multiplications, return
6966 an equivalent value that just refers to a register, memory, or constant.
6967 This is done by generating instructions to perform the arithmetic and
6968 returning a pseudo-register containing the value.
6970 The returned value may be a REG, SUBREG, MEM or constant. */
6973 force_operand (rtx value, rtx target)
6975 rtx op1, op2;
6976 /* Use subtarget as the target for operand 0 of a binary operation. */
6977 rtx subtarget = get_subtarget (target);
6978 enum rtx_code code = GET_CODE (value);
6980 /* Check for subreg applied to an expression produced by loop optimizer. */
6981 if (code == SUBREG
6982 && !REG_P (SUBREG_REG (value))
6983 && !MEM_P (SUBREG_REG (value)))
6985 value
6986 = simplify_gen_subreg (GET_MODE (value),
6987 force_reg (GET_MODE (SUBREG_REG (value)),
6988 force_operand (SUBREG_REG (value),
6989 NULL_RTX)),
6990 GET_MODE (SUBREG_REG (value)),
6991 SUBREG_BYTE (value));
6992 code = GET_CODE (value);
6995 /* Check for a PIC address load. */
6996 if ((code == PLUS || code == MINUS)
6997 && XEXP (value, 0) == pic_offset_table_rtx
6998 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6999 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7000 || GET_CODE (XEXP (value, 1)) == CONST))
7002 if (!subtarget)
7003 subtarget = gen_reg_rtx (GET_MODE (value));
7004 emit_move_insn (subtarget, value);
7005 return subtarget;
7008 if (ARITHMETIC_P (value))
7010 op2 = XEXP (value, 1);
7011 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7012 subtarget = 0;
7013 if (code == MINUS && CONST_INT_P (op2))
7015 code = PLUS;
7016 op2 = negate_rtx (GET_MODE (value), op2);
7019 /* Check for an addition with OP2 a constant integer and our first
7020 operand a PLUS of a virtual register and something else. In that
7021 case, we want to emit the sum of the virtual register and the
7022 constant first and then add the other value. This allows virtual
7023 register instantiation to simply modify the constant rather than
7024 creating another one around this addition. */
7025 if (code == PLUS && CONST_INT_P (op2)
7026 && GET_CODE (XEXP (value, 0)) == PLUS
7027 && REG_P (XEXP (XEXP (value, 0), 0))
7028 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7029 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7031 rtx temp = expand_simple_binop (GET_MODE (value), code,
7032 XEXP (XEXP (value, 0), 0), op2,
7033 subtarget, 0, OPTAB_LIB_WIDEN);
7034 return expand_simple_binop (GET_MODE (value), code, temp,
7035 force_operand (XEXP (XEXP (value,
7036 0), 1), 0),
7037 target, 0, OPTAB_LIB_WIDEN);
7040 op1 = force_operand (XEXP (value, 0), subtarget);
7041 op2 = force_operand (op2, NULL_RTX);
7042 switch (code)
7044 case MULT:
7045 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7046 case DIV:
7047 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7048 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7049 target, 1, OPTAB_LIB_WIDEN);
7050 else
7051 return expand_divmod (0,
7052 FLOAT_MODE_P (GET_MODE (value))
7053 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7054 GET_MODE (value), op1, op2, target, 0);
7055 case MOD:
7056 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7057 target, 0);
7058 case UDIV:
7059 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7060 target, 1);
7061 case UMOD:
7062 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7063 target, 1);
7064 case ASHIFTRT:
7065 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7066 target, 0, OPTAB_LIB_WIDEN);
7067 default:
7068 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7069 target, 1, OPTAB_LIB_WIDEN);
7072 if (UNARY_P (value))
7074 if (!target)
7075 target = gen_reg_rtx (GET_MODE (value));
7076 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7077 switch (code)
7079 case ZERO_EXTEND:
7080 case SIGN_EXTEND:
7081 case TRUNCATE:
7082 case FLOAT_EXTEND:
7083 case FLOAT_TRUNCATE:
7084 convert_move (target, op1, code == ZERO_EXTEND);
7085 return target;
7087 case FIX:
7088 case UNSIGNED_FIX:
7089 expand_fix (target, op1, code == UNSIGNED_FIX);
7090 return target;
7092 case FLOAT:
7093 case UNSIGNED_FLOAT:
7094 expand_float (target, op1, code == UNSIGNED_FLOAT);
7095 return target;
7097 default:
7098 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7102 #ifdef INSN_SCHEDULING
7103 /* On machines that have insn scheduling, we want all memory references to be
7104 explicit, so we need to deal with such paradoxical SUBREGs. */
7105 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7106 value
7107 = simplify_gen_subreg (GET_MODE (value),
7108 force_reg (GET_MODE (SUBREG_REG (value)),
7109 force_operand (SUBREG_REG (value),
7110 NULL_RTX)),
7111 GET_MODE (SUBREG_REG (value)),
7112 SUBREG_BYTE (value));
7113 #endif
7115 return value;
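/* A small sketch of force_operand on an arithmetic rtx built from
   hypothetical pseudo registers:

     (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101))

   Both operands are forced recursively, so a shift or multiply insn is
   emitted for the inner MULT followed by an add for the PLUS, and the
   returned value is a register (or TARGET) holding the sum, suitable
   for use as a general operand.  */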
7118 /* Subroutine of expand_expr: return nonzero iff there is no way that
7119 EXP can reference X, which is being modified. TOP_P is nonzero if this
7120 call is going to be used to determine whether we need a temporary
7121 for EXP, as opposed to a recursive call to this function.
7123 It is always safe for this routine to return zero since it merely
7124 searches for optimization opportunities. */
7127 safe_from_p (const_rtx x, tree exp, int top_p)
7129 rtx exp_rtl = 0;
7130 int i, nops;
7132 if (x == 0
7133 /* If EXP has varying size, we MUST use a target since we currently
7134 have no way of allocating temporaries of variable size
7135 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7136 So we assume here that something at a higher level has prevented a
7137 clash. This is somewhat bogus, but the best we can do. Only
7138 do this when X is BLKmode and when we are at the top level. */
7139 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7140 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7141 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7142 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7143 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7144 != INTEGER_CST)
7145 && GET_MODE (x) == BLKmode)
7146 /* If X is in the outgoing argument area, it is always safe. */
7147 || (MEM_P (x)
7148 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7149 || (GET_CODE (XEXP (x, 0)) == PLUS
7150 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7151 return 1;
7153 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7154 find the underlying pseudo. */
7155 if (GET_CODE (x) == SUBREG)
7157 x = SUBREG_REG (x);
7158 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7159 return 0;
7162 /* Now look at our tree code and possibly recurse. */
7163 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7165 case tcc_declaration:
7166 exp_rtl = DECL_RTL_IF_SET (exp);
7167 break;
7169 case tcc_constant:
7170 return 1;
7172 case tcc_exceptional:
7173 if (TREE_CODE (exp) == TREE_LIST)
7175 while (1)
7177 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7178 return 0;
7179 exp = TREE_CHAIN (exp);
7180 if (!exp)
7181 return 1;
7182 if (TREE_CODE (exp) != TREE_LIST)
7183 return safe_from_p (x, exp, 0);
7186 else if (TREE_CODE (exp) == CONSTRUCTOR)
7188 constructor_elt *ce;
7189 unsigned HOST_WIDE_INT idx;
7191 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7192 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7193 || !safe_from_p (x, ce->value, 0))
7194 return 0;
7195 return 1;
7197 else if (TREE_CODE (exp) == ERROR_MARK)
7198 return 1; /* An already-visited SAVE_EXPR? */
7199 else
7200 return 0;
7202 case tcc_statement:
7203 /* The only case we look at here is the DECL_INITIAL inside a
7204 DECL_EXPR. */
7205 return (TREE_CODE (exp) != DECL_EXPR
7206 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7207 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7208 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7210 case tcc_binary:
7211 case tcc_comparison:
7212 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7213 return 0;
7214 /* Fall through. */
7216 case tcc_unary:
7217 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7219 case tcc_expression:
7220 case tcc_reference:
7221 case tcc_vl_exp:
7222 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7223 the expression. If it is set, we conflict iff we are that rtx or
7224 both are in memory. Otherwise, we check all operands of the
7225 expression recursively. */
7227 switch (TREE_CODE (exp))
7229 case ADDR_EXPR:
7230 /* If the operand is static or we are static, we can't conflict.
7231 Likewise if we don't conflict with the operand at all. */
7232 if (staticp (TREE_OPERAND (exp, 0))
7233 || TREE_STATIC (exp)
7234 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7235 return 1;
7237 /* Otherwise, the only way this can conflict is if we are taking
7238 the address of a DECL whose address is part of X, which is
7239 very rare. */
7240 exp = TREE_OPERAND (exp, 0);
7241 if (DECL_P (exp))
7243 if (!DECL_RTL_SET_P (exp)
7244 || !MEM_P (DECL_RTL (exp)))
7245 return 0;
7246 else
7247 exp_rtl = XEXP (DECL_RTL (exp), 0);
7249 break;
7251 case MEM_REF:
7252 if (MEM_P (x)
7253 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7254 get_alias_set (exp)))
7255 return 0;
7256 break;
7258 case CALL_EXPR:
7259 /* Assume that the call will clobber all hard registers and
7260 all of memory. */
7261 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7262 || MEM_P (x))
7263 return 0;
7264 break;
7266 case WITH_CLEANUP_EXPR:
7267 case CLEANUP_POINT_EXPR:
7268 /* Lowered by gimplify.c. */
7269 gcc_unreachable ();
7271 case SAVE_EXPR:
7272 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7274 default:
7275 break;
7278 /* If we have an rtx, we do not need to scan our operands. */
7279 if (exp_rtl)
7280 break;
7282 nops = TREE_OPERAND_LENGTH (exp);
7283 for (i = 0; i < nops; i++)
7284 if (TREE_OPERAND (exp, i) != 0
7285 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7286 return 0;
7288 break;
7290 case tcc_type:
7291 /* Should never get a type here. */
7292 gcc_unreachable ();
7295 /* If we have an rtl, find any enclosed object. Then see if we conflict
7296 with it. */
7297 if (exp_rtl)
7299 if (GET_CODE (exp_rtl) == SUBREG)
7301 exp_rtl = SUBREG_REG (exp_rtl);
7302 if (REG_P (exp_rtl)
7303 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7304 return 0;
7307 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7308 are memory and they conflict. */
7309 return ! (rtx_equal_p (x, exp_rtl)
7310 || (MEM_P (x) && MEM_P (exp_rtl)
7311 && true_dependence (exp_rtl, VOIDmode, x)));
7314 /* If we reach here, it is safe. */
7315 return 1;
7319 /* Return the highest power of two that EXP is known to be a multiple of.
7320 This is used in updating alignment of MEMs in array references. */
7322 unsigned HOST_WIDE_INT
7323 highest_pow2_factor (const_tree exp)
7325 unsigned HOST_WIDE_INT ret;
7326 int trailing_zeros = tree_ctz (exp);
7327 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7328 return BIGGEST_ALIGNMENT;
7329 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7330 if (ret > BIGGEST_ALIGNMENT)
7331 return BIGGEST_ALIGNMENT;
7332 return ret;
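/* A worked example for highest_pow2_factor, on a hypothetical sizetype
   expression

     i * 12

   The constant factor contributes two known trailing zero bits whatever
   the value of I, so tree_ctz reports 2 and the function returns 4; a
   bare variable I yields 1.  */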
7335 /* Similar, except that the alignment requirements of TARGET are
7336 taken into account. Assume it is at least as aligned as its
7337 type, unless it is a COMPONENT_REF in which case the layout of
7338 the structure gives the alignment. */
7340 static unsigned HOST_WIDE_INT
7341 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7343 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7344 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7346 return MAX (factor, talign);
7349 /* Convert the tree comparison code TCODE to the rtl one where the
7350 signedness is UNSIGNEDP. */
7352 static enum rtx_code
7353 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7355 enum rtx_code code;
7356 switch (tcode)
7358 case EQ_EXPR:
7359 code = EQ;
7360 break;
7361 case NE_EXPR:
7362 code = NE;
7363 break;
7364 case LT_EXPR:
7365 code = unsignedp ? LTU : LT;
7366 break;
7367 case LE_EXPR:
7368 code = unsignedp ? LEU : LE;
7369 break;
7370 case GT_EXPR:
7371 code = unsignedp ? GTU : GT;
7372 break;
7373 case GE_EXPR:
7374 code = unsignedp ? GEU : GE;
7375 break;
7376 case UNORDERED_EXPR:
7377 code = UNORDERED;
7378 break;
7379 case ORDERED_EXPR:
7380 code = ORDERED;
7381 break;
7382 case UNLT_EXPR:
7383 code = UNLT;
7384 break;
7385 case UNLE_EXPR:
7386 code = UNLE;
7387 break;
7388 case UNGT_EXPR:
7389 code = UNGT;
7390 break;
7391 case UNGE_EXPR:
7392 code = UNGE;
7393 break;
7394 case UNEQ_EXPR:
7395 code = UNEQ;
7396 break;
7397 case LTGT_EXPR:
7398 code = LTGT;
7399 break;
7401 default:
7402 gcc_unreachable ();
7404 return code;
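/* A minimal usage sketch, with a hypothetical caller:

     enum rtx_code rcode = convert_tree_comp_to_rtx (LE_EXPR, 1);

   Nonzero UNSIGNEDP selects the unsigned form, so RCODE is LEU here;
   the unordered variants such as UNLT_EXPR do not depend on the
   signedness.  */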
7407 /* Subroutine of expand_expr. Expand the two operands of a binary
7408 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7409 The value may be stored in TARGET if TARGET is nonzero. The
7410 MODIFIER argument is as documented by expand_expr. */
7412 void
7413 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7414 enum expand_modifier modifier)
7416 if (! safe_from_p (target, exp1, 1))
7417 target = 0;
7418 if (operand_equal_p (exp0, exp1, 0))
7420 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7421 *op1 = copy_rtx (*op0);
7423 else
7425 /* If we need to preserve evaluation order, copy exp0 into its own
7426 temporary variable so that it can't be clobbered by exp1. */
7427 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7428 exp0 = save_expr (exp0);
7429 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7430 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7435 /* Return a MEM that contains constant EXP. DEFER is as for
7436 output_constant_def and MODIFIER is as for expand_expr. */
7438 static rtx
7439 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7441 rtx mem;
7443 mem = output_constant_def (exp, defer);
7444 if (modifier != EXPAND_INITIALIZER)
7445 mem = use_anchored_address (mem);
7446 return mem;
7449 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7450 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7452 static rtx
7453 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7454 enum expand_modifier modifier, addr_space_t as)
7456 rtx result, subtarget;
7457 tree inner, offset;
7458 HOST_WIDE_INT bitsize, bitpos;
7459 int unsignedp, reversep, volatilep = 0;
7460 machine_mode mode1;
7462 /* If we are taking the address of a constant and are at the top level,
7463 we have to use output_constant_def since we can't call force_const_mem
7464 at top level. */
7465 /* ??? This should be considered a front-end bug. We should not be
7466 generating ADDR_EXPR of something that isn't an LVALUE. The only
7467 exception here is STRING_CST. */
7468 if (CONSTANT_CLASS_P (exp))
7470 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7471 if (modifier < EXPAND_SUM)
7472 result = force_operand (result, target);
7473 return result;
7476 /* Everything must be something allowed by is_gimple_addressable. */
7477 switch (TREE_CODE (exp))
7479 case INDIRECT_REF:
7480 /* This case will happen via recursion for &a->b. */
7481 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7483 case MEM_REF:
7485 tree tem = TREE_OPERAND (exp, 0);
7486 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7487 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7488 return expand_expr (tem, target, tmode, modifier);
7491 case CONST_DECL:
7492 /* Expand the initializer like constants above. */
7493 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7494 0, modifier), 0);
7495 if (modifier < EXPAND_SUM)
7496 result = force_operand (result, target);
7497 return result;
7499 case REALPART_EXPR:
7500 /* The real part of the complex number is always first, therefore
7501 the address is the same as the address of the parent object. */
7502 offset = 0;
7503 bitpos = 0;
7504 inner = TREE_OPERAND (exp, 0);
7505 break;
7507 case IMAGPART_EXPR:
7508 /* The imaginary part of the complex number is always second.
7509 The expression is therefore always offset by the size of the
7510 scalar type. */
7511 offset = 0;
7512 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7513 inner = TREE_OPERAND (exp, 0);
7514 break;
7516 case COMPOUND_LITERAL_EXPR:
7517 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7518 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7519 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7520 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7521 the initializers aren't gimplified. */
7522 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7523 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7524 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7525 target, tmode, modifier, as);
7526 /* FALLTHRU */
7527 default:
7528 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7529 expand_expr, as that can have various side effects; LABEL_DECLs for
7530 example, may not have their DECL_RTL set yet. Expand the rtl of
7531 CONSTRUCTORs too, which should yield a memory reference for the
7532 constructor's contents. Assume language specific tree nodes can
7533 be expanded in some interesting way. */
7534 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7535 if (DECL_P (exp)
7536 || TREE_CODE (exp) == CONSTRUCTOR
7537 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7539 result = expand_expr (exp, target, tmode,
7540 modifier == EXPAND_INITIALIZER
7541 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7543 /* If the DECL isn't in memory, then the DECL wasn't properly
7544 marked TREE_ADDRESSABLE, which will be either a front-end
7545 or a tree optimizer bug. */
7547 gcc_assert (MEM_P (result));
7548 result = XEXP (result, 0);
7550 /* ??? Is this needed anymore? */
7551 if (DECL_P (exp))
7552 TREE_USED (exp) = 1;
7554 if (modifier != EXPAND_INITIALIZER
7555 && modifier != EXPAND_CONST_ADDRESS
7556 && modifier != EXPAND_SUM)
7557 result = force_operand (result, target);
7558 return result;
7561 /* Pass FALSE as the last argument to get_inner_reference although
7562 we are expanding to RTL. The rationale is that we know how to
7563 handle "aligning nodes" here: we can just bypass them because
7564 they won't change the final object whose address will be returned
7565 (they actually exist only for that purpose). */
7566 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
7567 &unsignedp, &reversep, &volatilep, false);
7568 break;
7571 /* We must have made progress. */
7572 gcc_assert (inner != exp);
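/* Only suggest TARGET for the recursive expansion when no offset or bit
   position remains to be added afterwards, since TARGET is meant to hold
   the final address.  */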
7574 subtarget = offset || bitpos ? NULL_RTX : target;
7575 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7576 inner alignment, force the inner to be sufficiently aligned. */
7577 if (CONSTANT_CLASS_P (inner)
7578 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7580 inner = copy_node (inner);
7581 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7582 SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
7583 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7585 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7587 if (offset)
7589 rtx tmp;
7591 if (modifier != EXPAND_NORMAL)
7592 result = force_operand (result, NULL);
7593 tmp = expand_expr (offset, NULL_RTX, tmode,
7594 modifier == EXPAND_INITIALIZER
7595 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7597 /* expand_expr is allowed to return an object in a mode other
7598 than TMODE. If it did, we need to convert. */
7599 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7600 tmp = convert_modes (tmode, GET_MODE (tmp),
7601 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7602 result = convert_memory_address_addr_space (tmode, result, as);
7603 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7605 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7606 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7607 else
7609 subtarget = bitpos ? NULL_RTX : target;
7610 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7611 1, OPTAB_LIB_WIDEN);
7615 if (bitpos)
7617 /* Someone beforehand should have rejected taking the address
7618 of such an object. */
7619 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7621 result = convert_memory_address_addr_space (tmode, result, as);
7622 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7623 if (modifier < EXPAND_SUM)
7624 result = force_operand (result, target);
7627 return result;
7630 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7631 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7633 static rtx
7634 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7635 enum expand_modifier modifier)
7637 addr_space_t as = ADDR_SPACE_GENERIC;
7638 machine_mode address_mode = Pmode;
7639 machine_mode pointer_mode = ptr_mode;
7640 machine_mode rmode;
7641 rtx result;
7643 /* Target mode of VOIDmode says "whatever's natural". */
7644 if (tmode == VOIDmode)
7645 tmode = TYPE_MODE (TREE_TYPE (exp));
7647 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7649 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7650 address_mode = targetm.addr_space.address_mode (as);
7651 pointer_mode = targetm.addr_space.pointer_mode (as);
7654 /* We can get called with some Weird Things if the user does silliness
7655 like "(short) &a". In that case, convert_memory_address won't do
7656 the right thing, so ignore the given target mode. */
7657 if (tmode != address_mode && tmode != pointer_mode)
7658 tmode = address_mode;
7660 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7661 tmode, modifier, as);
7663 /* Despite expand_expr's claims about ignoring TMODE when not
7664 strictly convenient, things break if we don't honor it. Note
7665 that combined with the above, we only do this for pointer modes. */
7666 rmode = GET_MODE (result);
7667 if (rmode == VOIDmode)
7668 rmode = tmode;
7669 if (rmode != tmode)
7670 result = convert_memory_address_addr_space (tmode, result, as);
7672 return result;
7675 /* Generate code for computing CONSTRUCTOR EXP.
7676 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7677 is TRUE, instead of creating a temporary variable in memory
7678 NULL is returned and the caller needs to handle it differently. */
7680 static rtx
7681 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7682 bool avoid_temp_mem)
7684 tree type = TREE_TYPE (exp);
7685 machine_mode mode = TYPE_MODE (type);
7687 /* Try to avoid creating a temporary at all. This is possible
7688 if all of the initializer is zero.
7689 FIXME: try to handle all [0..255] initializers we can handle
7690 with memset. */
7691 if (TREE_STATIC (exp)
7692 && !TREE_ADDRESSABLE (exp)
7693 && target != 0 && mode == BLKmode
7694 && all_zeros_p (exp))
7696 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7697 return target;
7700 /* All elts simple constants => refer to a constant in memory. But
7701 if this is a non-BLKmode mode, let it store a field at a time
7702 since that should make a CONST_INT, CONST_WIDE_INT or
7703 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7704 use, it is best to store directly into the target unless the type
7705 is large enough that memcpy will be used. If we are making an
7706 initializer and all operands are constant, put it in memory as
7707 well.
7709 FIXME: Avoid trying to fill vector constructors piece-meal.
7710 Output them with output_constant_def below unless we're sure
7711 they're zeros. This should go away when vector initializers
7712 are treated like VECTOR_CST instead of arrays. */
7713 if ((TREE_STATIC (exp)
7714 && ((mode == BLKmode
7715 && ! (target != 0 && safe_from_p (target, exp, 1)))
7716 || TREE_ADDRESSABLE (exp)
7717 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7718 && (! can_move_by_pieces
7719 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7720 TYPE_ALIGN (type)))
7721 && ! mostly_zeros_p (exp))))
7722 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7723 && TREE_CONSTANT (exp)))
7725 rtx constructor;
7727 if (avoid_temp_mem)
7728 return NULL_RTX;
7730 constructor = expand_expr_constant (exp, 1, modifier);
7732 if (modifier != EXPAND_CONST_ADDRESS
7733 && modifier != EXPAND_INITIALIZER
7734 && modifier != EXPAND_SUM)
7735 constructor = validize_mem (constructor);
7737 return constructor;
7740 /* Handle calls that pass values in multiple non-contiguous
7741 locations. The Irix 6 ABI has examples of this. */
7742 if (target == 0 || ! safe_from_p (target, exp, 1)
7743 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7745 if (avoid_temp_mem)
7746 return NULL_RTX;
7748 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7751 store_constructor (exp, target, 0, int_expr_size (exp), false);
7752 return target;
7756 /* expand_expr: generate code for computing expression EXP.
7757 An rtx for the computed value is returned. The value is never null.
7758 In the case of a void EXP, const0_rtx is returned.
7760 The value may be stored in TARGET if TARGET is nonzero.
7761 TARGET is just a suggestion; callers must assume that
7762 the rtx returned may not be the same as TARGET.
7764 If TARGET is CONST0_RTX, it means that the value will be ignored.
7766 If TMODE is not VOIDmode, it suggests generating the
7767 result in mode TMODE. But this is done only when convenient.
7768 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7769 TMODE is just a suggestion; callers must assume that
7770 the rtx returned may not have mode TMODE.
7772 Note that TARGET may have neither TMODE nor MODE. In that case, it
7773 probably will not be used.
7775 If MODIFIER is EXPAND_SUM then when EXP is an addition
7776 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7777 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7778 products as above, or REG or MEM, or constant.
7779 Ordinarily in such cases we would output mul or add instructions
7780 and then return a pseudo reg containing the sum.
7782 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7783 it also marks a label as absolutely required (it can't be dead).
7784 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7785 This is used for outputting expressions used in initializers.
7787 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7788 with a constant address even if that address is not normally legitimate.
7789 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7791 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7792 a call parameter. Such targets require special care as we haven't yet
7793 marked TARGET so that it's safe from being trashed by libcalls. We
7794 don't want to use TARGET for anything but the final result;
7795 intermediate values must go elsewhere. Additionally, calls to
7796 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7798 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7799 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7800 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7801 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7802 recursively.
7804 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7805 In this case, we don't adjust a returned MEM rtx that wouldn't be
7806 sufficiently aligned for its mode; instead, it's up to the caller
7807 to deal with it afterwards. This is used to make sure that unaligned
7808 base objects for which out-of-bounds accesses are supported, for
7809 example record types with trailing arrays, aren't realigned behind
7810 the back of the caller.
7811 The normal operating mode is to pass FALSE for this parameter. */
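/* A typical call, letting the expander pick both location and mode, is
     rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
   where expand_expr and expand_normal are thin wrappers around this
   function.  */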
7813 rtx
7814 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7815 enum expand_modifier modifier, rtx *alt_rtl,
7816 bool inner_reference_p)
7818 rtx ret;
7820 /* Handle ERROR_MARK before anybody tries to access its type. */
7821 if (TREE_CODE (exp) == ERROR_MARK
7822 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7824 ret = CONST0_RTX (tmode);
7825 return ret ? ret : const0_rtx;
7828 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7829 inner_reference_p);
7830 return ret;
7833 /* Try to expand the conditional expression which is represented by
7834 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds
7835 return the rtl reg which represents the result. Otherwise return
7836 NULL_RTX. */
7838 static rtx
7839 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7840 tree treeop1 ATTRIBUTE_UNUSED,
7841 tree treeop2 ATTRIBUTE_UNUSED)
7843 rtx insn;
7844 rtx op00, op01, op1, op2;
7845 enum rtx_code comparison_code;
7846 machine_mode comparison_mode;
7847 gimple *srcstmt;
7848 rtx temp;
7849 tree type = TREE_TYPE (treeop1);
7850 int unsignedp = TYPE_UNSIGNED (type);
7851 machine_mode mode = TYPE_MODE (type);
7852 machine_mode orig_mode = mode;
7854 /* If we cannot do a conditional move on the mode, try doing it
7855 with the promoted mode. */
7856 if (!can_conditionally_move_p (mode))
7858 mode = promote_mode (type, mode, &unsignedp);
7859 if (!can_conditionally_move_p (mode))
7860 return NULL_RTX;
7861 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7863 else
7864 temp = assign_temp (type, 0, 1);
7866 start_sequence ();
7867 expand_operands (treeop1, treeop2,
7868 temp, &op1, &op2, EXPAND_NORMAL);
7870 if (TREE_CODE (treeop0) == SSA_NAME
7871 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7873 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7874 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7875 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7876 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7877 comparison_mode = TYPE_MODE (type);
7878 unsignedp = TYPE_UNSIGNED (type);
7879 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7881 else if (COMPARISON_CLASS_P (treeop0))
7883 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7884 enum tree_code cmpcode = TREE_CODE (treeop0);
7885 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7886 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7887 unsignedp = TYPE_UNSIGNED (type);
7888 comparison_mode = TYPE_MODE (type);
7889 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7891 else
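/* Otherwise TREEOP0 is not itself a comparison; treat it as a boolean
   value and compare it against zero.  */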
7893 op00 = expand_normal (treeop0);
7894 op01 = const0_rtx;
7895 comparison_code = NE;
7896 comparison_mode = GET_MODE (op00);
7897 if (comparison_mode == VOIDmode)
7898 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7901 if (GET_MODE (op1) != mode)
7902 op1 = gen_lowpart (mode, op1);
7904 if (GET_MODE (op2) != mode)
7905 op2 = gen_lowpart (mode, op2);
7907 /* Try to emit the conditional move. */
7908 insn = emit_conditional_move (temp, comparison_code,
7909 op00, op01, comparison_mode,
7910 op1, op2, mode,
7911 unsignedp);
7913 /* If we could do the conditional move, emit the sequence,
7914 and return. */
7915 if (insn)
7917 rtx_insn *seq = get_insns ();
7918 end_sequence ();
7919 emit_insn (seq);
7920 return convert_modes (orig_mode, mode, temp, 0);
7923 /* Otherwise discard the sequence and fall back to code with
7924 branches. */
7925 end_sequence ();
7926 return NULL_RTX;
7929 rtx
7930 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
7931 enum expand_modifier modifier)
7933 rtx op0, op1, op2, temp;
7934 rtx_code_label *lab;
7935 tree type;
7936 int unsignedp;
7937 machine_mode mode;
7938 enum tree_code code = ops->code;
7939 optab this_optab;
7940 rtx subtarget, original_target;
7941 int ignore;
7942 bool reduce_bit_field;
7943 location_t loc = ops->location;
7944 tree treeop0, treeop1, treeop2;
7945 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7946 ? reduce_to_bit_field_precision ((expr), \
7947 target, \
7948 type) \
7949 : (expr))
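/* That is, when the computation is carried out in a mode wider than the
   bit-field type, mask or sign-extend the result back down to
   TYPE_PRECISION (type) bits; see reduce_to_bit_field_precision.  */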
7951 type = ops->type;
7952 mode = TYPE_MODE (type);
7953 unsignedp = TYPE_UNSIGNED (type);
7955 treeop0 = ops->op0;
7956 treeop1 = ops->op1;
7957 treeop2 = ops->op2;
7959 /* We should be called only on simple (binary or unary) expressions,
7960 exactly those that are valid in gimple expressions that aren't
7961 GIMPLE_SINGLE_RHS (or invalid). */
7962 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7963 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7964 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7966 ignore = (target == const0_rtx
7967 || ((CONVERT_EXPR_CODE_P (code)
7968 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7969 && TREE_CODE (type) == VOID_TYPE));
7971 /* We should be called only if we need the result. */
7972 gcc_assert (!ignore);
7974 /* An operation in what may be a bit-field type needs the
7975 result to be reduced to the precision of the bit-field type,
7976 which is narrower than that of the type's mode. */
7977 reduce_bit_field = (INTEGRAL_TYPE_P (type)
7978 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7980 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7981 target = 0;
7983 /* Use subtarget as the target for operand 0 of a binary operation. */
7984 subtarget = get_subtarget (target);
7985 original_target = target;
7987 switch (code)
7989 case NON_LVALUE_EXPR:
7990 case PAREN_EXPR:
7991 CASE_CONVERT:
7992 if (treeop0 == error_mark_node)
7993 return const0_rtx;
7995 if (TREE_CODE (type) == UNION_TYPE)
7997 tree valtype = TREE_TYPE (treeop0);
7999 /* If both input and output are BLKmode, this conversion isn't doing
8000 anything except possibly changing the memory attributes.
8001 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8003 rtx result = expand_expr (treeop0, target, tmode,
8004 modifier);
8006 result = copy_rtx (result);
8007 set_mem_attributes (result, type, 0);
8008 return result;
8011 if (target == 0)
8013 if (TYPE_MODE (type) != BLKmode)
8014 target = gen_reg_rtx (TYPE_MODE (type));
8015 else
8016 target = assign_temp (type, 1, 1);
8019 if (MEM_P (target))
8020 /* Store data into beginning of memory target. */
8021 store_expr (treeop0,
8022 adjust_address (target, TYPE_MODE (valtype), 0),
8023 modifier == EXPAND_STACK_PARM,
8024 false, TYPE_REVERSE_STORAGE_ORDER (type));
8026 else
8028 gcc_assert (REG_P (target)
8029 && !TYPE_REVERSE_STORAGE_ORDER (type));
8031 /* Store this field into a union of the proper type. */
8032 store_field (target,
8033 MIN ((int_size_in_bytes (TREE_TYPE
8034 (treeop0))
8035 * BITS_PER_UNIT),
8036 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8037 0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8038 false, false);
8041 /* Return the entire union. */
8042 return target;
8045 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8047 op0 = expand_expr (treeop0, target, VOIDmode,
8048 modifier);
8050 /* If the signedness of the conversion differs and OP0 is
8051 a promoted SUBREG, clear that indication since we now
8052 have to do the proper extension. */
8053 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8054 && GET_CODE (op0) == SUBREG)
8055 SUBREG_PROMOTED_VAR_P (op0) = 0;
8057 return REDUCE_BIT_FIELD (op0);
8060 op0 = expand_expr (treeop0, NULL_RTX, mode,
8061 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8062 if (GET_MODE (op0) == mode)
8065 /* If OP0 is a constant, just convert it into the proper mode. */
8066 else if (CONSTANT_P (op0))
8068 tree inner_type = TREE_TYPE (treeop0);
8069 machine_mode inner_mode = GET_MODE (op0);
8071 if (inner_mode == VOIDmode)
8072 inner_mode = TYPE_MODE (inner_type);
8074 if (modifier == EXPAND_INITIALIZER)
8075 op0 = lowpart_subreg (mode, op0, inner_mode);
8076 else
8077 op0 = convert_modes (mode, inner_mode, op0,
8078 TYPE_UNSIGNED (inner_type));
8081 else if (modifier == EXPAND_INITIALIZER)
8082 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8084 else if (target == 0)
8085 op0 = convert_to_mode (mode, op0,
8086 TYPE_UNSIGNED (TREE_TYPE
8087 (treeop0)));
8088 else
8090 convert_move (target, op0,
8091 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8092 op0 = target;
8095 return REDUCE_BIT_FIELD (op0);
8097 case ADDR_SPACE_CONVERT_EXPR:
8099 tree treeop0_type = TREE_TYPE (treeop0);
8101 gcc_assert (POINTER_TYPE_P (type));
8102 gcc_assert (POINTER_TYPE_P (treeop0_type));
8104 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8105 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8107 /* Conversions between pointers to the same address space should
8108 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8109 gcc_assert (as_to != as_from);
8111 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8113 /* Ask target code to handle conversion between pointers
8114 to overlapping address spaces. */
8115 if (targetm.addr_space.subset_p (as_to, as_from)
8116 || targetm.addr_space.subset_p (as_from, as_to))
8118 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8120 else
8122 /* For disjoint address spaces, converting anything but a null
8123 pointer invokes undefined behavior. We truncate or extend the
8124 value as if we'd converted via integers, which handles 0 as
8125 required, and all others as the programmer likely expects. */
8126 #ifndef POINTERS_EXTEND_UNSIGNED
8127 const int POINTERS_EXTEND_UNSIGNED = 1;
8128 #endif
8129 op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8130 op0, POINTERS_EXTEND_UNSIGNED);
8132 gcc_assert (op0);
8133 return op0;
8136 case POINTER_PLUS_EXPR:
8137 /* Even though the sizetype mode and the pointer's mode can be different
8138 expand is able to handle this correctly and get the correct result out
8139 of the PLUS_EXPR code. */
8140 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8141 if sizetype precision is smaller than pointer precision. */
8142 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8143 treeop1 = fold_convert_loc (loc, type,
8144 fold_convert_loc (loc, ssizetype,
8145 treeop1));
8146 /* If sizetype precision is larger than pointer precision, truncate the
8147 offset to have matching modes. */
8148 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8149 treeop1 = fold_convert_loc (loc, type, treeop1);
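/* A POINTER_PLUS_EXPR then expands exactly like a PLUS_EXPR, so fall
   through to the PLUS_EXPR code below.  */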
8151 case PLUS_EXPR:
8152 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8153 something else, make sure we add the register to the constant and
8154 then to the other thing. This case can occur during strength
8155 reduction and doing it this way will produce better code if the
8156 frame pointer or argument pointer is eliminated.
8158 fold-const.c will ensure that the constant is always in the inner
8159 PLUS_EXPR, so the only case we need to do anything about is if
8160 sp, ap, or fp is our second argument, in which case we must swap
8161 the innermost first argument and our second argument. */
8163 if (TREE_CODE (treeop0) == PLUS_EXPR
8164 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8165 && TREE_CODE (treeop1) == VAR_DECL
8166 && (DECL_RTL (treeop1) == frame_pointer_rtx
8167 || DECL_RTL (treeop1) == stack_pointer_rtx
8168 || DECL_RTL (treeop1) == arg_pointer_rtx))
8170 gcc_unreachable ();
8173 /* If the result is to be ptr_mode and we are adding an integer to
8174 something, we might be forming a constant. So try to use
8175 plus_constant. If it produces a sum and we can't accept it,
8176 use force_operand. This allows P = &ARR[const] to generate
8177 efficient code on machines where a SYMBOL_REF is not a valid
8178 address.
8180 If this is an EXPAND_SUM call, always return the sum. */
8181 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8182 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8184 if (modifier == EXPAND_STACK_PARM)
8185 target = 0;
8186 if (TREE_CODE (treeop0) == INTEGER_CST
8187 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8188 && TREE_CONSTANT (treeop1))
8190 rtx constant_part;
8191 HOST_WIDE_INT wc;
8192 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8194 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8195 EXPAND_SUM);
8196 /* Use wi::shwi to ensure that the constant is
8197 truncated according to the mode of OP1, then sign extended
8198 to a HOST_WIDE_INT. Using the constant directly can result
8199 in non-canonical RTL in a 64x32 cross compile. */
8200 wc = TREE_INT_CST_LOW (treeop0);
8201 constant_part =
8202 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8203 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8204 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8205 op1 = force_operand (op1, target);
8206 return REDUCE_BIT_FIELD (op1);
8209 else if (TREE_CODE (treeop1) == INTEGER_CST
8210 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8211 && TREE_CONSTANT (treeop0))
8213 rtx constant_part;
8214 HOST_WIDE_INT wc;
8215 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8217 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8218 (modifier == EXPAND_INITIALIZER
8219 ? EXPAND_INITIALIZER : EXPAND_SUM));
8220 if (! CONSTANT_P (op0))
8222 op1 = expand_expr (treeop1, NULL_RTX,
8223 VOIDmode, modifier);
8224 /* Return a PLUS if modifier says it's OK. */
8225 if (modifier == EXPAND_SUM
8226 || modifier == EXPAND_INITIALIZER)
8227 return simplify_gen_binary (PLUS, mode, op0, op1);
8228 goto binop2;
8230 /* Use wi::shwi to ensure that the constant is
8231 truncated according to the mode of OP1, then sign extended
8232 to a HOST_WIDE_INT. Using the constant directly can result
8233 in non-canonical RTL in a 64x32 cross compile. */
8234 wc = TREE_INT_CST_LOW (treeop1);
8235 constant_part
8236 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8237 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8238 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8239 op0 = force_operand (op0, target);
8240 return REDUCE_BIT_FIELD (op0);
8244 /* Use TER to expand pointer addition of a negated value
8245 as pointer subtraction. */
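/* That is, rewrite p + (-x), where x is defined by a NEGATE_EXPR, as
   p - x and let the MINUS_EXPR code below handle it.  */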
8246 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8247 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8248 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8249 && TREE_CODE (treeop1) == SSA_NAME
8250 && TYPE_MODE (TREE_TYPE (treeop0))
8251 == TYPE_MODE (TREE_TYPE (treeop1)))
8253 gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8254 if (def)
8256 treeop1 = gimple_assign_rhs1 (def);
8257 code = MINUS_EXPR;
8258 goto do_minus;
8262 /* No sense saving up arithmetic to be done
8263 if it's all in the wrong mode to form part of an address.
8264 And force_operand won't know whether to sign-extend or
8265 zero-extend. */
8266 if (modifier != EXPAND_INITIALIZER
8267 && (modifier != EXPAND_SUM || mode != ptr_mode))
8269 expand_operands (treeop0, treeop1,
8270 subtarget, &op0, &op1, modifier);
8271 if (op0 == const0_rtx)
8272 return op1;
8273 if (op1 == const0_rtx)
8274 return op0;
8275 goto binop2;
8278 expand_operands (treeop0, treeop1,
8279 subtarget, &op0, &op1, modifier);
8280 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8282 case MINUS_EXPR:
8283 do_minus:
8284 /* For initializers, we are allowed to return a MINUS of two
8285 symbolic constants. Here we handle all cases when both operands
8286 are constant. */
8287 /* Handle difference of two symbolic constants,
8288 for the sake of an initializer. */
8289 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8290 && really_constant_p (treeop0)
8291 && really_constant_p (treeop1))
8293 expand_operands (treeop0, treeop1,
8294 NULL_RTX, &op0, &op1, modifier);
8296 /* If the last operand is a CONST_INT, use plus_constant of
8297 the negated constant. Else make the MINUS. */
8298 if (CONST_INT_P (op1))
8299 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8300 -INTVAL (op1)));
8301 else
8302 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8305 /* No sense saving up arithmetic to be done
8306 if it's all in the wrong mode to form part of an address.
8307 And force_operand won't know whether to sign-extend or
8308 zero-extend. */
8309 if (modifier != EXPAND_INITIALIZER
8310 && (modifier != EXPAND_SUM || mode != ptr_mode))
8311 goto binop;
8313 expand_operands (treeop0, treeop1,
8314 subtarget, &op0, &op1, modifier);
8316 /* Convert A - const to A + (-const). */
8317 if (CONST_INT_P (op1))
8319 op1 = negate_rtx (mode, op1);
8320 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8323 goto binop2;
8325 case WIDEN_MULT_PLUS_EXPR:
8326 case WIDEN_MULT_MINUS_EXPR:
8327 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8328 op2 = expand_normal (treeop2);
8329 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8330 target, unsignedp);
8331 return target;
8333 case WIDEN_MULT_EXPR:
8334 /* If first operand is constant, swap them.
8335 Thus the following special case checks need only
8336 check the second operand. */
8337 if (TREE_CODE (treeop0) == INTEGER_CST)
8338 std::swap (treeop0, treeop1);
8340 /* First, check if we have a multiplication of one signed and one
8341 unsigned operand. */
8342 if (TREE_CODE (treeop1) != INTEGER_CST
8343 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8344 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8346 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8347 this_optab = usmul_widen_optab;
8348 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8349 != CODE_FOR_nothing)
8351 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8352 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8353 EXPAND_NORMAL);
8354 else
8355 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8356 EXPAND_NORMAL);
8357 /* op0 and op1 might still be constant, despite the above
8358 != INTEGER_CST check. Handle it. */
8359 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8361 op0 = convert_modes (innermode, mode, op0, true);
8362 op1 = convert_modes (innermode, mode, op1, false);
8363 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8364 target, unsignedp));
8366 goto binop3;
8369 /* Check for a multiplication with matching signedness. */
8370 else if ((TREE_CODE (treeop1) == INTEGER_CST
8371 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8372 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8373 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8375 tree op0type = TREE_TYPE (treeop0);
8376 machine_mode innermode = TYPE_MODE (op0type);
8377 bool zextend_p = TYPE_UNSIGNED (op0type);
8378 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8379 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8381 if (TREE_CODE (treeop0) != INTEGER_CST)
8383 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8384 != CODE_FOR_nothing)
8386 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8387 EXPAND_NORMAL);
8388 /* op0 and op1 might still be constant, despite the above
8389 != INTEGER_CST check. Handle it. */
8390 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8392 widen_mult_const:
8393 op0 = convert_modes (innermode, mode, op0, zextend_p);
8394 op1
8395 = convert_modes (innermode, mode, op1,
8396 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8397 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8398 target,
8399 unsignedp));
8401 temp = expand_widening_mult (mode, op0, op1, target,
8402 unsignedp, this_optab);
8403 return REDUCE_BIT_FIELD (temp);
8405 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8406 != CODE_FOR_nothing
8407 && innermode == word_mode)
8409 rtx htem, hipart;
8410 op0 = expand_normal (treeop0);
8411 if (TREE_CODE (treeop1) == INTEGER_CST)
8412 op1 = convert_modes (innermode, mode,
8413 expand_normal (treeop1),
8414 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8415 else
8416 op1 = expand_normal (treeop1);
8417 /* op0 and op1 might still be constant, despite the above
8418 != INTEGER_CST check. Handle it. */
8419 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8420 goto widen_mult_const;
8421 temp = expand_binop (mode, other_optab, op0, op1, target,
8422 unsignedp, OPTAB_LIB_WIDEN);
8423 hipart = gen_highpart (innermode, temp);
8424 htem = expand_mult_highpart_adjust (innermode, hipart,
8425 op0, op1, hipart,
8426 zextend_p);
8427 if (htem != hipart)
8428 emit_move_insn (hipart, htem);
8429 return REDUCE_BIT_FIELD (temp);
8433 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8434 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8435 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8436 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8438 case FMA_EXPR:
8440 optab opt = fma_optab;
8441 gimple *def0, *def2;
8443 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8444 call. */
8445 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8447 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8448 tree call_expr;
8450 gcc_assert (fn != NULL_TREE);
8451 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8452 return expand_builtin (call_expr, target, subtarget, mode, false);
8455 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8456 /* The multiplication is commutative - look at its 2nd operand
8457 if the first isn't fed by a negate. */
8458 if (!def0)
8460 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8461 /* Swap operands if the 2nd operand is fed by a negate. */
8462 if (def0)
8463 std::swap (treeop0, treeop1);
8465 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8467 op0 = op2 = NULL;
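/* FMA_EXPR computes treeop0 * treeop1 + treeop2; pick a negated variant
   when the feeding negations allow it: fnms for (-a)*b + (-c), fnma for
   (-a)*b + c, and fms for a*b + (-c).  */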
8469 if (def0 && def2
8470 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8472 opt = fnms_optab;
8473 op0 = expand_normal (gimple_assign_rhs1 (def0));
8474 op2 = expand_normal (gimple_assign_rhs1 (def2));
8476 else if (def0
8477 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8479 opt = fnma_optab;
8480 op0 = expand_normal (gimple_assign_rhs1 (def0));
8482 else if (def2
8483 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8485 opt = fms_optab;
8486 op2 = expand_normal (gimple_assign_rhs1 (def2));
8489 if (op0 == NULL)
8490 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8491 if (op2 == NULL)
8492 op2 = expand_normal (treeop2);
8493 op1 = expand_normal (treeop1);
8495 return expand_ternary_op (TYPE_MODE (type), opt,
8496 op0, op1, op2, target, 0);
8499 case MULT_EXPR:
8500 /* If this is a fixed-point operation, then we cannot use the code
8501 below because "expand_mult" doesn't support sat/no-sat fixed-point
8502 multiplications. */
8503 if (ALL_FIXED_POINT_MODE_P (mode))
8504 goto binop;
8506 /* If first operand is constant, swap them.
8507 Thus the following special case checks need only
8508 check the second operand. */
8509 if (TREE_CODE (treeop0) == INTEGER_CST)
8510 std::swap (treeop0, treeop1);
8512 /* Attempt to return something suitable for generating an
8513 indexed address, for machines that support that. */
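/* E.g. when an address like p + i*4 is expanded with EXPAND_SUM, the
   i*4 part comes through here and is returned as (mult (reg) (const_int 4))
   for the caller to fold into the address.  */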
8515 if (modifier == EXPAND_SUM && mode == ptr_mode
8516 && tree_fits_shwi_p (treeop1))
8518 tree exp1 = treeop1;
8520 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8521 EXPAND_SUM);
8523 if (!REG_P (op0))
8524 op0 = force_operand (op0, NULL_RTX);
8525 if (!REG_P (op0))
8526 op0 = copy_to_mode_reg (mode, op0);
8528 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8529 gen_int_mode (tree_to_shwi (exp1),
8530 TYPE_MODE (TREE_TYPE (exp1)))));
8533 if (modifier == EXPAND_STACK_PARM)
8534 target = 0;
8536 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8537 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8539 case TRUNC_DIV_EXPR:
8540 case FLOOR_DIV_EXPR:
8541 case CEIL_DIV_EXPR:
8542 case ROUND_DIV_EXPR:
8543 case EXACT_DIV_EXPR:
8544 /* If this is a fixed-point operation, then we cannot use the code
8545 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8546 divisions. */
8547 if (ALL_FIXED_POINT_MODE_P (mode))
8548 goto binop;
8550 if (modifier == EXPAND_STACK_PARM)
8551 target = 0;
8552 /* Possible optimization: compute the dividend with EXPAND_SUM;
8553 then, if the divisor is constant, we can optimize the case
8554 where some terms of the dividend have coefficients divisible by it. */
8555 expand_operands (treeop0, treeop1,
8556 subtarget, &op0, &op1, EXPAND_NORMAL);
8557 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8559 case RDIV_EXPR:
8560 goto binop;
8562 case MULT_HIGHPART_EXPR:
8563 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8564 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8565 gcc_assert (temp);
8566 return temp;
8568 case TRUNC_MOD_EXPR:
8569 case FLOOR_MOD_EXPR:
8570 case CEIL_MOD_EXPR:
8571 case ROUND_MOD_EXPR:
8572 if (modifier == EXPAND_STACK_PARM)
8573 target = 0;
8574 expand_operands (treeop0, treeop1,
8575 subtarget, &op0, &op1, EXPAND_NORMAL);
8576 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8578 case FIXED_CONVERT_EXPR:
8579 op0 = expand_normal (treeop0);
8580 if (target == 0 || modifier == EXPAND_STACK_PARM)
8581 target = gen_reg_rtx (mode);
8583 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8584 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8585 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8586 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8587 else
8588 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8589 return target;
8591 case FIX_TRUNC_EXPR:
8592 op0 = expand_normal (treeop0);
8593 if (target == 0 || modifier == EXPAND_STACK_PARM)
8594 target = gen_reg_rtx (mode);
8595 expand_fix (target, op0, unsignedp);
8596 return target;
8598 case FLOAT_EXPR:
8599 op0 = expand_normal (treeop0);
8600 if (target == 0 || modifier == EXPAND_STACK_PARM)
8601 target = gen_reg_rtx (mode);
8602 /* expand_float can't figure out what to do if FROM has VOIDmode.
8603 So give it the correct mode. With -O, cse will optimize this. */
8604 if (GET_MODE (op0) == VOIDmode)
8605 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8606 op0);
8607 expand_float (target, op0,
8608 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8609 return target;
8611 case NEGATE_EXPR:
8612 op0 = expand_expr (treeop0, subtarget,
8613 VOIDmode, EXPAND_NORMAL);
8614 if (modifier == EXPAND_STACK_PARM)
8615 target = 0;
8616 temp = expand_unop (mode,
8617 optab_for_tree_code (NEGATE_EXPR, type,
8618 optab_default),
8619 op0, target, 0);
8620 gcc_assert (temp);
8621 return REDUCE_BIT_FIELD (temp);
8623 case ABS_EXPR:
8624 op0 = expand_expr (treeop0, subtarget,
8625 VOIDmode, EXPAND_NORMAL);
8626 if (modifier == EXPAND_STACK_PARM)
8627 target = 0;
8629 /* ABS_EXPR is not valid for complex arguments. */
8630 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8631 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8633 /* Unsigned abs is simply the operand. Testing here means we don't
8634 risk generating incorrect code below. */
8635 if (TYPE_UNSIGNED (type))
8636 return op0;
8638 return expand_abs (mode, op0, target, unsignedp,
8639 safe_from_p (target, treeop0, 1));
8641 case MAX_EXPR:
8642 case MIN_EXPR:
8643 target = original_target;
8644 if (target == 0
8645 || modifier == EXPAND_STACK_PARM
8646 || (MEM_P (target) && MEM_VOLATILE_P (target))
8647 || GET_MODE (target) != mode
8648 || (REG_P (target)
8649 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8650 target = gen_reg_rtx (mode);
8651 expand_operands (treeop0, treeop1,
8652 target, &op0, &op1, EXPAND_NORMAL);
8654 /* First try to do it with a special MIN or MAX instruction.
8655 If that does not win, use a conditional jump to select the proper
8656 value. */
8657 this_optab = optab_for_tree_code (code, type, optab_default);
8658 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8659 OPTAB_WIDEN);
8660 if (temp != 0)
8661 return temp;
8663 /* At this point, a MEM target is no longer useful; we will get better
8664 code without it. */
8666 if (! REG_P (target))
8667 target = gen_reg_rtx (mode);
8669 /* If op1 was placed in target, swap op0 and op1. */
8670 if (target != op0 && target == op1)
8671 std::swap (op0, op1);
8673 /* We generate better code and avoid problems with op1 mentioning
8674 target by forcing op1 into a pseudo if it isn't a constant. */
8675 if (! CONSTANT_P (op1))
8676 op1 = force_reg (mode, op1);
8679 enum rtx_code comparison_code;
8680 rtx cmpop1 = op1;
8682 if (code == MAX_EXPR)
8683 comparison_code = unsignedp ? GEU : GE;
8684 else
8685 comparison_code = unsignedp ? LEU : LE;
8687 /* Canonicalize to comparisons against 0. */
8688 if (op1 == const1_rtx)
8690 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8691 or (a != 0 ? a : 1) for unsigned.
8692 For MIN we are safe converting (a <= 1 ? a : 1)
8693 into (a <= 0 ? a : 1) */
8694 cmpop1 = const0_rtx;
8695 if (code == MAX_EXPR)
8696 comparison_code = unsignedp ? NE : GT;
8698 if (op1 == constm1_rtx && !unsignedp)
8700 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8701 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8702 cmpop1 = const0_rtx;
8703 if (code == MIN_EXPR)
8704 comparison_code = LT;
8707 /* Use a conditional move if possible. */
8708 if (can_conditionally_move_p (mode))
8710 rtx insn;
8712 start_sequence ();
8714 /* Try to emit the conditional move. */
8715 insn = emit_conditional_move (target, comparison_code,
8716 op0, cmpop1, mode,
8717 op0, op1, mode,
8718 unsignedp);
8720 /* If we could do the conditional move, emit the sequence,
8721 and return. */
8722 if (insn)
8724 rtx_insn *seq = get_insns ();
8725 end_sequence ();
8726 emit_insn (seq);
8727 return target;
8730 /* Otherwise discard the sequence and fall back to code with
8731 branches. */
8732 end_sequence ();
8735 if (target != op0)
8736 emit_move_insn (target, op0);
8738 lab = gen_label_rtx ();
8739 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8740 unsignedp, mode, NULL_RTX, NULL, lab,
8741 -1);
8743 emit_move_insn (target, op1);
8744 emit_label (lab);
8745 return target;
8747 case BIT_NOT_EXPR:
8748 op0 = expand_expr (treeop0, subtarget,
8749 VOIDmode, EXPAND_NORMAL);
8750 if (modifier == EXPAND_STACK_PARM)
8751 target = 0;
8752 /* If we have to reduce the result to bit-field precision for an
8753 unsigned bit-field, expand this as an XOR with the proper constant
8754 mask instead. */
8755 if (reduce_bit_field && TYPE_UNSIGNED (type))
8757 wide_int mask = wi::mask (TYPE_PRECISION (type),
8758 false, GET_MODE_PRECISION (mode));
8760 temp = expand_binop (mode, xor_optab, op0,
8761 immed_wide_int_const (mask, mode),
8762 target, 1, OPTAB_LIB_WIDEN);
8764 else
8765 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8766 gcc_assert (temp);
8767 return temp;
8769 /* ??? Can optimize bitwise operations with one arg constant.
8770 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8771 and (a bitwise1 b) bitwise2 b (etc)
8772 but that is probably not worthwhile. */
8774 case BIT_AND_EXPR:
8775 case BIT_IOR_EXPR:
8776 case BIT_XOR_EXPR:
8777 goto binop;
8779 case LROTATE_EXPR:
8780 case RROTATE_EXPR:
8781 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8782 || (GET_MODE_PRECISION (TYPE_MODE (type))
8783 == TYPE_PRECISION (type)));
8784 /* fall through */
8786 case LSHIFT_EXPR:
8787 case RSHIFT_EXPR:
8789 /* If this is a fixed-point operation, then we cannot use the code
8790 below because "expand_shift" doesn't support sat/no-sat fixed-point
8791 shifts. */
8792 if (ALL_FIXED_POINT_MODE_P (mode))
8793 goto binop;
8795 if (! safe_from_p (subtarget, treeop1, 1))
8796 subtarget = 0;
8797 if (modifier == EXPAND_STACK_PARM)
8798 target = 0;
8799 op0 = expand_expr (treeop0, subtarget,
8800 VOIDmode, EXPAND_NORMAL);
8802 /* Left shift optimization when shifting across word_size boundary.
8804 If mode == GET_MODE_WIDER_MODE (word_mode), then normally there is no
8805 native instruction to support this wide-mode left shift. Consider the
8806 scenario below:
8808 Type A = (Type) B << C
8810 |< T >|
8811 | dest_high | dest_low |
8813 | word_size |
8815 If the shift amount C causes B to be shifted across the word size
8816 boundary, i.e. part of B is shifted into the high half of the destination
8817 register while part of B remains in the low half, then GCC will use
8818 the following left shift expand logic:
8820 1. Initialize dest_low to B.
8821 2. Initialize every bit of dest_high to the sign bit of B.
8822 3. Logically left shift dest_low by C bits to finalize dest_low.
8823 The value of dest_low before this shift is kept in a temp D.
8824 4. Logically left shift dest_high by C bits.
8825 5. Logically right shift D by (word_size - C) bits.
8826 6. Or the result of 4 and 5 to finalize dest_high.
8828 However, by checking the gimple statements, if operand B comes from
8829 a sign extension, then we can simplify the above expand logic into:
8831 1. dest_high = src_low >> (word_size - C).
8832 2. dest_low = src_low << C.
8834 A single arithmetic right shift accomplishes steps 2, 4, 5 and 6,
8835 thus reducing the number of steps needed from 6 to 2. */
8837 temp = NULL_RTX;
8838 if (code == LSHIFT_EXPR
8839 && target
8840 && REG_P (target)
8841 && ! unsignedp
8842 && mode == GET_MODE_WIDER_MODE (word_mode)
8843 && GET_MODE_SIZE (mode) == 2 * GET_MODE_SIZE (word_mode)
8844 && TREE_CONSTANT (treeop1)
8845 && TREE_CODE (treeop0) == SSA_NAME)
8847 gimple *def = SSA_NAME_DEF_STMT (treeop0);
8848 if (is_gimple_assign (def)
8849 && gimple_assign_rhs_code (def) == NOP_EXPR)
8851 machine_mode rmode = TYPE_MODE
8852 (TREE_TYPE (gimple_assign_rhs1 (def)));
8854 if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (mode)
8855 && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
8856 && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
8857 >= GET_MODE_BITSIZE (word_mode)))
8859 rtx_insn *seq, *seq_old;
8860 unsigned int high_off = subreg_highpart_offset (word_mode,
8861 mode);
8862 rtx low = lowpart_subreg (word_mode, op0, mode);
8863 rtx dest_low = lowpart_subreg (word_mode, target, mode);
8864 rtx dest_high = simplify_gen_subreg (word_mode, target,
8865 mode, high_off);
8866 HOST_WIDE_INT ramount = (BITS_PER_WORD
8867 - TREE_INT_CST_LOW (treeop1));
8868 tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
8870 start_sequence ();
8871 /* dest_high = src_low >> (word_size - C). */
8872 temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
8873 rshift, dest_high, unsignedp);
8874 if (temp != dest_high)
8875 emit_move_insn (dest_high, temp);
8877 /* dest_low = src_low << C. */
8878 temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
8879 treeop1, dest_low, unsignedp);
8880 if (temp != dest_low)
8881 emit_move_insn (dest_low, temp);
8883 seq = get_insns ();
8884 end_sequence ();
8885 temp = target;
8887 if (have_insn_for (ASHIFT, mode))
8889 bool speed_p = optimize_insn_for_speed_p ();
8890 start_sequence ();
8891 rtx ret_old = expand_variable_shift (code, mode, op0,
8892 treeop1, target,
8893 unsignedp);
8895 seq_old = get_insns ();
8896 end_sequence ();
8897 if (seq_cost (seq, speed_p)
8898 >= seq_cost (seq_old, speed_p))
8900 seq = seq_old;
8901 temp = ret_old;
8904 emit_insn (seq);
8909 if (temp == NULL_RTX)
8910 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8911 unsignedp);
8912 if (code == LSHIFT_EXPR)
8913 temp = REDUCE_BIT_FIELD (temp);
8914 return temp;
8917 /* Could determine the answer when only additive constants differ. Also,
8918 the addition of one can be handled by changing the condition. */
8919 case LT_EXPR:
8920 case LE_EXPR:
8921 case GT_EXPR:
8922 case GE_EXPR:
8923 case EQ_EXPR:
8924 case NE_EXPR:
8925 case UNORDERED_EXPR:
8926 case ORDERED_EXPR:
8927 case UNLT_EXPR:
8928 case UNLE_EXPR:
8929 case UNGT_EXPR:
8930 case UNGE_EXPR:
8931 case UNEQ_EXPR:
8932 case LTGT_EXPR:
8934 temp = do_store_flag (ops,
8935 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8936 tmode != VOIDmode ? tmode : mode);
8937 if (temp)
8938 return temp;
8940 /* Use a compare and a jump for BLKmode comparisons, or for function
8941 type comparisons if have_canonicalize_funcptr_for_compare. */
8943 if ((target == 0
8944 || modifier == EXPAND_STACK_PARM
8945 || ! safe_from_p (target, treeop0, 1)
8946 || ! safe_from_p (target, treeop1, 1)
8947 /* Make sure we don't have a hard reg (such as function's return
8948 value) live across basic blocks, if not optimizing. */
8949 || (!optimize && REG_P (target)
8950 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8951 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8953 emit_move_insn (target, const0_rtx);
8955 rtx_code_label *lab1 = gen_label_rtx ();
8956 jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
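/* For a signed 1-bit type the only nonzero value is -1, so use that
   as the "true" value.  */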
8958 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8959 emit_move_insn (target, constm1_rtx);
8960 else
8961 emit_move_insn (target, const1_rtx);
8963 emit_label (lab1);
8964 return target;
8966 case COMPLEX_EXPR:
8967 /* Get the rtx code of the operands. */
8968 op0 = expand_normal (treeop0);
8969 op1 = expand_normal (treeop1);
8971 if (!target)
8972 target = gen_reg_rtx (TYPE_MODE (type));
8973 else
8974 /* If target overlaps with op1, then either we need to force
8975 op1 into a pseudo (if target also overlaps with op0),
8976 or write the complex parts in reverse order. */
8977 switch (GET_CODE (target))
8979 case CONCAT:
8980 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8982 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8984 complex_expr_force_op1:
8985 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8986 emit_move_insn (temp, op1);
8987 op1 = temp;
8988 break;
8990 complex_expr_swap_order:
8991 /* Move the imaginary (op1) and real (op0) parts to their
8992 location. */
8993 write_complex_part (target, op1, true);
8994 write_complex_part (target, op0, false);
8996 return target;
8998 break;
8999 case MEM:
9000 temp = adjust_address_nv (target,
9001 GET_MODE_INNER (GET_MODE (target)), 0);
9002 if (reg_overlap_mentioned_p (temp, op1))
9004 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9005 temp = adjust_address_nv (target, imode,
9006 GET_MODE_SIZE (imode));
9007 if (reg_overlap_mentioned_p (temp, op0))
9008 goto complex_expr_force_op1;
9009 goto complex_expr_swap_order;
9011 break;
9012 default:
9013 if (reg_overlap_mentioned_p (target, op1))
9015 if (reg_overlap_mentioned_p (target, op0))
9016 goto complex_expr_force_op1;
9017 goto complex_expr_swap_order;
9019 break;
9022 /* Move the real (op0) and imaginary (op1) parts to their location. */
9023 write_complex_part (target, op0, false);
9024 write_complex_part (target, op1, true);
9026 return target;
9028 case WIDEN_SUM_EXPR:
9030 tree oprnd0 = treeop0;
9031 tree oprnd1 = treeop1;
9033 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9034 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9035 target, unsignedp);
9036 return target;
9039 case REDUC_MAX_EXPR:
9040 case REDUC_MIN_EXPR:
9041 case REDUC_PLUS_EXPR:
9043 op0 = expand_normal (treeop0);
9044 this_optab = optab_for_tree_code (code, type, optab_default);
9045 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9047 struct expand_operand ops[2];
9048 enum insn_code icode = optab_handler (this_optab, vec_mode);
9050 create_output_operand (&ops[0], target, mode);
9051 create_input_operand (&ops[1], op0, vec_mode);
9052 expand_insn (icode, 2, ops);
9053 target = ops[0].value;
9054 if (GET_MODE (target) != mode)
9055 return gen_lowpart (tmode, target);
9056 return target;
9059 case VEC_UNPACK_HI_EXPR:
9060 case VEC_UNPACK_LO_EXPR:
9062 op0 = expand_normal (treeop0);
9063 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9064 target, unsignedp);
9065 gcc_assert (temp);
9066 return temp;
9069 case VEC_UNPACK_FLOAT_HI_EXPR:
9070 case VEC_UNPACK_FLOAT_LO_EXPR:
9072 op0 = expand_normal (treeop0);
9073 /* The signedness is determined from the input operand. */
9074 temp = expand_widen_pattern_expr
9075 (ops, op0, NULL_RTX, NULL_RTX,
9076 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9078 gcc_assert (temp);
9079 return temp;
9082 case VEC_WIDEN_MULT_HI_EXPR:
9083 case VEC_WIDEN_MULT_LO_EXPR:
9084 case VEC_WIDEN_MULT_EVEN_EXPR:
9085 case VEC_WIDEN_MULT_ODD_EXPR:
9086 case VEC_WIDEN_LSHIFT_HI_EXPR:
9087 case VEC_WIDEN_LSHIFT_LO_EXPR:
9088 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9089 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9090 target, unsignedp);
9091 gcc_assert (target);
9092 return target;
9094 case VEC_PACK_TRUNC_EXPR:
9095 case VEC_PACK_SAT_EXPR:
9096 case VEC_PACK_FIX_TRUNC_EXPR:
9097 mode = TYPE_MODE (TREE_TYPE (treeop0));
9098 goto binop;
9100 case VEC_PERM_EXPR:
9101 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9102 op2 = expand_normal (treeop2);
9104 /* Careful here: if the target doesn't support integral vector modes,
9105 a constant selection vector could wind up smooshed into a normal
9106 integral constant. */
9107 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9109 tree sel_type = TREE_TYPE (treeop2);
9110 machine_mode vmode
9111 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9112 TYPE_VECTOR_SUBPARTS (sel_type));
9113 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9114 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9115 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9117 else
9118 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9120 temp = expand_vec_perm (mode, op0, op1, op2, target);
9121 gcc_assert (temp);
9122 return temp;
9124 case DOT_PROD_EXPR:
9126 tree oprnd0 = treeop0;
9127 tree oprnd1 = treeop1;
9128 tree oprnd2 = treeop2;
9129 rtx op2;
9131 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9132 op2 = expand_normal (oprnd2);
9133 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9134 target, unsignedp);
9135 return target;
9138 case SAD_EXPR:
9140 tree oprnd0 = treeop0;
9141 tree oprnd1 = treeop1;
9142 tree oprnd2 = treeop2;
9143 rtx op2;
9145 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9146 op2 = expand_normal (oprnd2);
9147 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9148 target, unsignedp);
9149 return target;
9152 case REALIGN_LOAD_EXPR:
9154 tree oprnd0 = treeop0;
9155 tree oprnd1 = treeop1;
9156 tree oprnd2 = treeop2;
9157 rtx op2;
9159 this_optab = optab_for_tree_code (code, type, optab_default);
9160 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9161 op2 = expand_normal (oprnd2);
9162 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9163 target, unsignedp);
9164 gcc_assert (temp);
9165 return temp;
9168 case COND_EXPR:
9170 /* A COND_EXPR with its type being VOID_TYPE represents a
9171 conditional jump and is handled in
9172 expand_gimple_cond_expr. */
9173 gcc_assert (!VOID_TYPE_P (type));
9175 /* Note that COND_EXPRs whose type is a structure or union
9176 are required to be constructed to contain assignments of
9177 a temporary variable, so that we can evaluate them here
9178 for side effect only. If type is void, we must do likewise. */
9180 gcc_assert (!TREE_ADDRESSABLE (type)
9181 && !ignore
9182 && TREE_TYPE (treeop1) != void_type_node
9183 && TREE_TYPE (treeop2) != void_type_node);
9185 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9186 if (temp)
9187 return temp;
9189 /* If we are not to produce a result, we have no target. Otherwise,
9190 if a target was specified use it; it will not be used as an
9191 intermediate target unless it is safe. If no target, use a
9192 temporary. */
9194 if (modifier != EXPAND_STACK_PARM
9195 && original_target
9196 && safe_from_p (original_target, treeop0, 1)
9197 && GET_MODE (original_target) == mode
9198 && !MEM_P (original_target))
9199 temp = original_target;
9200 else
9201 temp = assign_temp (type, 0, 1);
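        /* Emit the two-armed evaluation by hand; in outline, the code
           below produces:

                if (!treeop0) goto lab0;
                temp = treeop1;  goto lab1;
              lab0:
                temp = treeop2;
              lab1:

           and TEMP holds the result.  */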
9203 do_pending_stack_adjust ();
9204 NO_DEFER_POP;
9205 rtx_code_label *lab0 = gen_label_rtx ();
9206 rtx_code_label *lab1 = gen_label_rtx ();
9207 jumpifnot (treeop0, lab0, -1);
9208 store_expr (treeop1, temp,
9209 modifier == EXPAND_STACK_PARM,
9210 false, false);
9212 emit_jump_insn (targetm.gen_jump (lab1));
9213 emit_barrier ();
9214 emit_label (lab0);
9215 store_expr (treeop2, temp,
9216 modifier == EXPAND_STACK_PARM,
9217 false, false);
9219 emit_label (lab1);
9220 OK_DEFER_POP;
9221 return temp;
9224 case VEC_COND_EXPR:
9225 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9226 return target;
9228 case BIT_INSERT_EXPR:
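      /* BIT_INSERT_EXPR yields a copy of treeop0 in which the bits
         starting at the position given by treeop2 are replaced by the
         value of treeop1; e.g. inserting a QImode value at bit position
         8 of an SImode word rewrites only bits 8..15 of the copy made
         below.  */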
9230 unsigned bitpos = tree_to_uhwi (treeop2);
9231 unsigned bitsize;
9232 if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
9233 bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
9234 else
9235 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
9236 rtx op0 = expand_normal (treeop0);
9237 rtx op1 = expand_normal (treeop1);
9238 rtx dst = gen_reg_rtx (mode);
9239 emit_move_insn (dst, op0);
9240 store_bit_field (dst, bitsize, bitpos, 0, 0,
9241 TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
9242 return dst;
9245 default:
9246 gcc_unreachable ();
9249 /* Here to do an ordinary binary operator. */
9250 binop:
9251 expand_operands (treeop0, treeop1,
9252 subtarget, &op0, &op1, EXPAND_NORMAL);
9253 binop2:
9254 this_optab = optab_for_tree_code (code, type, optab_default);
9255 binop3:
9256 if (modifier == EXPAND_STACK_PARM)
9257 target = 0;
9258 temp = expand_binop (mode, this_optab, op0, op1, target,
9259 unsignedp, OPTAB_LIB_WIDEN);
9260 gcc_assert (temp);
9261 /* Bitwise operations do not need bitfield reduction as we expect their
9262 operands to be properly truncated. */
9263 if (code == BIT_XOR_EXPR
9264 || code == BIT_AND_EXPR
9265 || code == BIT_IOR_EXPR)
9266 return temp;
9267 return REDUCE_BIT_FIELD (temp);
9269 #undef REDUCE_BIT_FIELD
9272 /* Return TRUE if expression STMT is suitable for replacement.
9273 Never consider memory loads as replaceable, because those don't ever lead
9274 to constant expressions. */
9276 static bool
9277 stmt_is_replaceable_p (gimple *stmt)
9279 if (ssa_is_replaceable_p (stmt))
9281 /* Don't move around loads. */
9282 if (!gimple_assign_single_p (stmt)
9283 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9284 return true;
9286 return false;
9290 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9291 enum expand_modifier modifier, rtx *alt_rtl,
9292 bool inner_reference_p)
9294 rtx op0, op1, temp, decl_rtl;
9295 tree type;
9296 int unsignedp;
9297 machine_mode mode, dmode;
9298 enum tree_code code = TREE_CODE (exp);
9299 rtx subtarget, original_target;
9300 int ignore;
9301 tree context;
9302 bool reduce_bit_field;
9303 location_t loc = EXPR_LOCATION (exp);
9304 struct separate_ops ops;
9305 tree treeop0, treeop1, treeop2;
9306 tree ssa_name = NULL_TREE;
9307 gimple *g;
9309 type = TREE_TYPE (exp);
9310 mode = TYPE_MODE (type);
9311 unsignedp = TYPE_UNSIGNED (type);
9313 treeop0 = treeop1 = treeop2 = NULL_TREE;
9314 if (!VL_EXP_CLASS_P (exp))
9315 switch (TREE_CODE_LENGTH (code))
9317 default:
9318 case 3: treeop2 = TREE_OPERAND (exp, 2);
9319 case 2: treeop1 = TREE_OPERAND (exp, 1);
9320 case 1: treeop0 = TREE_OPERAND (exp, 0);
9321 case 0: break;
9323 ops.code = code;
9324 ops.type = type;
9325 ops.op0 = treeop0;
9326 ops.op1 = treeop1;
9327 ops.op2 = treeop2;
9328 ops.location = loc;
9330 ignore = (target == const0_rtx
9331 || ((CONVERT_EXPR_CODE_P (code)
9332 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9333 && TREE_CODE (type) == VOID_TYPE));
9335 /* An operation in what may be a bit-field type needs the
9336 result to be reduced to the precision of the bit-field type,
9337 which is narrower than that of the type's mode. */
9338 reduce_bit_field = (!ignore
9339 && INTEGRAL_TYPE_P (type)
9340 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9342 /* If we are going to ignore this result, we need only do something
9343 if there is a side-effect somewhere in the expression. If there
9344 is, short-circuit the most common cases here. Note that we must
9345 not call expand_expr with anything but const0_rtx in case this
9346 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9348 if (ignore)
9350 if (! TREE_SIDE_EFFECTS (exp))
9351 return const0_rtx;
9353 /* Ensure we reference a volatile object even if value is ignored, but
9354 don't do this if all we are doing is taking its address. */
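          /* E.g. for a volatile int *p, the statement "(void) *p;" must
             still perform the load even though the value is discarded;
             copying the MEM to a register below forces the read.  */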
9355 if (TREE_THIS_VOLATILE (exp)
9356 && TREE_CODE (exp) != FUNCTION_DECL
9357 && mode != VOIDmode && mode != BLKmode
9358 && modifier != EXPAND_CONST_ADDRESS)
9360 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9361 if (MEM_P (temp))
9362 copy_to_reg (temp);
9363 return const0_rtx;
9366 if (TREE_CODE_CLASS (code) == tcc_unary
9367 || code == BIT_FIELD_REF
9368 || code == COMPONENT_REF
9369 || code == INDIRECT_REF)
9370 return expand_expr (treeop0, const0_rtx, VOIDmode,
9371 modifier);
9373 else if (TREE_CODE_CLASS (code) == tcc_binary
9374 || TREE_CODE_CLASS (code) == tcc_comparison
9375 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9377 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9378 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9379 return const0_rtx;
9382 target = 0;
9385 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9386 target = 0;
9388 /* Use subtarget as the target for operand 0 of a binary operation. */
9389 subtarget = get_subtarget (target);
9390 original_target = target;
9392 switch (code)
9394 case LABEL_DECL:
9396 tree function = decl_function_context (exp);
9398 temp = label_rtx (exp);
9399 temp = gen_rtx_LABEL_REF (Pmode, temp);
9401 if (function != current_function_decl
9402 && function != 0)
9403 LABEL_REF_NONLOCAL_P (temp) = 1;
9405 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9406 return temp;
9409 case SSA_NAME:
9410 /* ??? ivopts calls expander, without any preparation from
9411 out-of-ssa. So fake instructions as if this was an access to the
9412 base variable. This unnecessarily allocates a pseudo, see how we can
9413 reuse it, if partition base vars have it set already. */
9414 if (!currently_expanding_to_rtl)
9416 tree var = SSA_NAME_VAR (exp);
9417 if (var && DECL_RTL_SET_P (var))
9418 return DECL_RTL (var);
9419 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9420 LAST_VIRTUAL_REGISTER + 1);
9423 g = get_gimple_for_ssa_name (exp);
9424 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9425 if (g == NULL
9426 && modifier == EXPAND_INITIALIZER
9427 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9428 && (optimize || !SSA_NAME_VAR (exp)
9429 || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9430 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9431 g = SSA_NAME_DEF_STMT (exp);
9432 if (g)
9434 rtx r;
9435 location_t saved_loc = curr_insn_location ();
9436 location_t loc = gimple_location (g);
9437 if (loc != UNKNOWN_LOCATION)
9438 set_curr_insn_location (loc);
9439 ops.code = gimple_assign_rhs_code (g);
9440 switch (get_gimple_rhs_class (ops.code))
9442 case GIMPLE_TERNARY_RHS:
9443 ops.op2 = gimple_assign_rhs3 (g);
9444 /* Fallthru */
9445 case GIMPLE_BINARY_RHS:
9446 ops.op1 = gimple_assign_rhs2 (g);
9448 /* Try to expand a conditional compare. */
9449 if (targetm.gen_ccmp_first)
9451 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9452 r = expand_ccmp_expr (g);
9453 if (r)
9454 break;
9456 /* Fallthru */
9457 case GIMPLE_UNARY_RHS:
9458 ops.op0 = gimple_assign_rhs1 (g);
9459 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9460 ops.location = loc;
9461 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9462 break;
9463 case GIMPLE_SINGLE_RHS:
9465 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9466 tmode, modifier, NULL, inner_reference_p);
9467 break;
9469 default:
9470 gcc_unreachable ();
9472 set_curr_insn_location (saved_loc);
9473 if (REG_P (r) && !REG_EXPR (r))
9474 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9475 return r;
9478 ssa_name = exp;
9479 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9480 exp = SSA_NAME_VAR (ssa_name);
9481 goto expand_decl_rtl;
9483 case PARM_DECL:
9484 case VAR_DECL:
9485 /* If a static var's type was incomplete when the decl was written,
9486 but the type is complete now, lay out the decl now. */
9487 if (DECL_SIZE (exp) == 0
9488 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9489 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9490 layout_decl (exp, 0);
9492 /* ... fall through ... */
9494 case FUNCTION_DECL:
9495 case RESULT_DECL:
9496 decl_rtl = DECL_RTL (exp);
9497 expand_decl_rtl:
9498 gcc_assert (decl_rtl);
9500 /* DECL_MODE might change when TYPE_MODE depends on attribute target
9501 settings for VECTOR_TYPE_P that might switch for the function. */
9502 if (currently_expanding_to_rtl
9503 && code == VAR_DECL && MEM_P (decl_rtl)
9504 && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
9505 decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
9506 else
9507 decl_rtl = copy_rtx (decl_rtl);
9509 /* Record writes to register variables. */
9510 if (modifier == EXPAND_WRITE
9511 && REG_P (decl_rtl)
9512 && HARD_REGISTER_P (decl_rtl))
9513 add_to_hard_reg_set (&crtl->asm_clobbers,
9514 GET_MODE (decl_rtl), REGNO (decl_rtl));
9516 /* Ensure the variable is marked as used even if it doesn't go through
9517 a parser. If it hasn't been used yet, write out an external
9518 definition. */
9519 if (exp)
9520 TREE_USED (exp) = 1;
9522 /* Show we haven't gotten RTL for this yet. */
9523 temp = 0;
9525 /* Variables inherited from containing functions should have
9526 been lowered by this point. */
9527 if (exp)
9528 context = decl_function_context (exp);
9529 gcc_assert (!exp
9530 || SCOPE_FILE_SCOPE_P (context)
9531 || context == current_function_decl
9532 || TREE_STATIC (exp)
9533 || DECL_EXTERNAL (exp)
9534 /* ??? C++ creates functions that are not TREE_STATIC. */
9535 || TREE_CODE (exp) == FUNCTION_DECL);
9537 /* This is the case of an array whose size is to be determined
9538 from its initializer, while the initializer is still being parsed.
9539 ??? We aren't parsing while expanding anymore. */
9541 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9542 temp = validize_mem (decl_rtl);
9544 /* If DECL_RTL is memory, we are in the normal case. If the
9545 address is not valid, get the address into a register. */
9547 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9549 if (alt_rtl)
9550 *alt_rtl = decl_rtl;
9551 decl_rtl = use_anchored_address (decl_rtl);
9552 if (modifier != EXPAND_CONST_ADDRESS
9553 && modifier != EXPAND_SUM
9554 && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
9555 : GET_MODE (decl_rtl),
9556 XEXP (decl_rtl, 0),
9557 MEM_ADDR_SPACE (decl_rtl)))
9558 temp = replace_equiv_address (decl_rtl,
9559 copy_rtx (XEXP (decl_rtl, 0)));
9562 /* If we got something, return it. But first, set the alignment
9563 if the address is a register. */
9564 if (temp != 0)
9566 if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
9567 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9569 return temp;
9572 if (exp)
9573 dmode = DECL_MODE (exp);
9574 else
9575 dmode = TYPE_MODE (TREE_TYPE (ssa_name));
9577 /* If the mode of DECL_RTL does not match that of the decl,
9578 there are two cases: we are dealing with a BLKmode value
9579 that is returned in a register, or we are dealing with
9580 a promoted value. In the latter case, return a SUBREG
9581 of the wanted mode, but mark it so that we know that it
9582 was already extended. */
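          /* E.g. a HImode variable kept in an SImode register by
             PROMOTE_MODE or the ABI comes back as (subreg:HI (reg:SI ...))
             with SUBREG_PROMOTED_VAR_P set, so later code knows the upper
             bits already hold a valid extension.  */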
9583 if (REG_P (decl_rtl)
9584 && dmode != BLKmode
9585 && GET_MODE (decl_rtl) != dmode)
9587 machine_mode pmode;
9589 /* Get the signedness to be used for this variable. Ensure we get
9590 the same mode we got when the variable was declared. */
9591 if (code != SSA_NAME)
9592 pmode = promote_decl_mode (exp, &unsignedp);
9593 else if ((g = SSA_NAME_DEF_STMT (ssa_name))
9594 && gimple_code (g) == GIMPLE_CALL
9595 && !gimple_call_internal_p (g))
9596 pmode = promote_function_mode (type, mode, &unsignedp,
9597 gimple_call_fntype (g),
9599 else
9600 pmode = promote_ssa_mode (ssa_name, &unsignedp);
9601 gcc_assert (GET_MODE (decl_rtl) == pmode);
9603 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9604 SUBREG_PROMOTED_VAR_P (temp) = 1;
9605 SUBREG_PROMOTED_SET (temp, unsignedp);
9606 return temp;
9609 return decl_rtl;
9611 case INTEGER_CST:
9612 /* Given that TYPE_PRECISION (type) is not always equal to
9613 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9614 the former to the latter according to the signedness of the
9615 type. */
9616 temp = immed_wide_int_const (wi::to_wide
9617 (exp,
9618 GET_MODE_PRECISION (TYPE_MODE (type))),
9619 TYPE_MODE (type));
9620 return temp;
9622 case VECTOR_CST:
9624 tree tmp = NULL_TREE;
9625 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9626 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9627 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9628 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9629 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9630 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9631 return const_vector_from_tree (exp);
9632 if (GET_MODE_CLASS (mode) == MODE_INT)
9634 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
9635 return const_scalar_mask_from_tree (exp);
9636 else
9638 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9639 if (type_for_mode)
9640 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
9641 type_for_mode, exp);
9644 if (!tmp)
9646 vec<constructor_elt, va_gc> *v;
9647 unsigned i;
9648 vec_alloc (v, VECTOR_CST_NELTS (exp));
9649 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9650 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9651 tmp = build_constructor (type, v);
9653 return expand_expr (tmp, ignore ? const0_rtx : target,
9654 tmode, modifier);
9657 case CONST_DECL:
9658 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9660 case REAL_CST:
9661 /* If optimized, generate immediate CONST_DOUBLE
9662 which will be turned into memory by reload if necessary.
9664 We used to force a register so that loop.c could see it. But
9665 this does not allow gen_* patterns to perform optimizations with
9666 the constants. It also produces two insns in cases like "x = 1.0;".
9667 On most machines, floating-point constants are not permitted in
9668 many insns, so we'd end up copying it to a register in any case.
9670 Now, we do the copying in expand_binop, if appropriate. */
9671 return const_double_from_real_value (TREE_REAL_CST (exp),
9672 TYPE_MODE (TREE_TYPE (exp)));
9674 case FIXED_CST:
9675 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9676 TYPE_MODE (TREE_TYPE (exp)));
9678 case COMPLEX_CST:
9679 /* Handle evaluating a complex constant in a CONCAT target. */
9680 if (original_target && GET_CODE (original_target) == CONCAT)
9682 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9683 rtx rtarg, itarg;
9685 rtarg = XEXP (original_target, 0);
9686 itarg = XEXP (original_target, 1);
9688 /* Move the real and imaginary parts separately. */
9689 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9690 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9692 if (op0 != rtarg)
9693 emit_move_insn (rtarg, op0);
9694 if (op1 != itarg)
9695 emit_move_insn (itarg, op1);
9697 return original_target;
9700 /* ... fall through ... */
9702 case STRING_CST:
9703 temp = expand_expr_constant (exp, 1, modifier);
9705 /* temp contains a constant address.
9706 On RISC machines where a constant address isn't valid,
9707 make some insns to get that address into a register. */
9708 if (modifier != EXPAND_CONST_ADDRESS
9709 && modifier != EXPAND_INITIALIZER
9710 && modifier != EXPAND_SUM
9711 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9712 MEM_ADDR_SPACE (temp)))
9713 return replace_equiv_address (temp,
9714 copy_rtx (XEXP (temp, 0)));
9715 return temp;
9717 case SAVE_EXPR:
9719 tree val = treeop0;
9720 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9721 inner_reference_p);
9723 if (!SAVE_EXPR_RESOLVED_P (exp))
9725 /* We can indeed still hit this case, typically via builtin
9726 expanders calling save_expr immediately before expanding
9727 something. Assume this means that we only have to deal
9728 with non-BLKmode values. */
9729 gcc_assert (GET_MODE (ret) != BLKmode);
9731 val = build_decl (curr_insn_location (),
9732 VAR_DECL, NULL, TREE_TYPE (exp));
9733 DECL_ARTIFICIAL (val) = 1;
9734 DECL_IGNORED_P (val) = 1;
9735 treeop0 = val;
9736 TREE_OPERAND (exp, 0) = treeop0;
9737 SAVE_EXPR_RESOLVED_P (exp) = 1;
9739 if (!CONSTANT_P (ret))
9740 ret = copy_to_reg (ret);
9741 SET_DECL_RTL (val, ret);
9744 return ret;
9748 case CONSTRUCTOR:
9749 /* If we don't need the result, just ensure we evaluate any
9750 subexpressions. */
9751 if (ignore)
9753 unsigned HOST_WIDE_INT idx;
9754 tree value;
9756 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9757 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9759 return const0_rtx;
9762 return expand_constructor (exp, target, modifier, false);
9764 case TARGET_MEM_REF:
9766 addr_space_t as
9767 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9768 enum insn_code icode;
9769 unsigned int align;
9771 op0 = addr_for_mem_ref (exp, as, true);
9772 op0 = memory_address_addr_space (mode, op0, as);
9773 temp = gen_rtx_MEM (mode, op0);
9774 set_mem_attributes (temp, exp, 0);
9775 set_mem_addr_space (temp, as);
9776 align = get_object_alignment (exp);
9777 if (modifier != EXPAND_WRITE
9778 && modifier != EXPAND_MEMORY
9779 && mode != BLKmode
9780 && align < GET_MODE_ALIGNMENT (mode)
9781 /* If the target does not have special handling for unaligned
9782 loads of mode then it can use regular moves for them. */
9783 && ((icode = optab_handler (movmisalign_optab, mode))
9784 != CODE_FOR_nothing))
9786 struct expand_operand ops[2];
9788 /* We've already validated the memory, and we're creating a
9789 new pseudo destination. The predicates really can't fail,
9790 nor can the generator. */
9791 create_output_operand (&ops[0], NULL_RTX, mode);
9792 create_fixed_operand (&ops[1], temp);
9793 expand_insn (icode, 2, ops);
9794 temp = ops[0].value;
9796 return temp;
9799 case MEM_REF:
9801 const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
9802 addr_space_t as
9803 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9804 machine_mode address_mode;
9805 tree base = TREE_OPERAND (exp, 0);
9806 gimple *def_stmt;
9807 enum insn_code icode;
9808 unsigned align;
9809 /* Handle expansion of non-aliased memory with non-BLKmode. That
9810 might end up in a register. */
9811 if (mem_ref_refers_to_non_mem_p (exp))
9813 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9814 base = TREE_OPERAND (base, 0);
9815 if (offset == 0
9816 && !reverse
9817 && tree_fits_uhwi_p (TYPE_SIZE (type))
9818 && (GET_MODE_BITSIZE (DECL_MODE (base))
9819 == tree_to_uhwi (TYPE_SIZE (type))))
9820 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9821 target, tmode, modifier);
9822 if (TYPE_MODE (type) == BLKmode)
9824 temp = assign_stack_temp (DECL_MODE (base),
9825 GET_MODE_SIZE (DECL_MODE (base)));
9826 store_expr (base, temp, 0, false, false);
9827 temp = adjust_address (temp, BLKmode, offset);
9828 set_mem_size (temp, int_size_in_bytes (type));
9829 return temp;
9831 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9832 bitsize_int (offset * BITS_PER_UNIT));
9833 REF_REVERSE_STORAGE_ORDER (exp) = reverse;
9834 return expand_expr (exp, target, tmode, modifier);
9836 address_mode = targetm.addr_space.address_mode (as);
9837 base = TREE_OPERAND (exp, 0);
9838 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9840 tree mask = gimple_assign_rhs2 (def_stmt);
9841 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9842 gimple_assign_rhs1 (def_stmt), mask);
9843 TREE_OPERAND (exp, 0) = base;
9845 align = get_object_alignment (exp);
9846 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9847 op0 = memory_address_addr_space (mode, op0, as);
9848 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9850 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9851 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9852 op0 = memory_address_addr_space (mode, op0, as);
9854 temp = gen_rtx_MEM (mode, op0);
9855 set_mem_attributes (temp, exp, 0);
9856 set_mem_addr_space (temp, as);
9857 if (TREE_THIS_VOLATILE (exp))
9858 MEM_VOLATILE_P (temp) = 1;
9859 if (modifier != EXPAND_WRITE
9860 && modifier != EXPAND_MEMORY
9861 && !inner_reference_p
9862 && mode != BLKmode
9863 && align < GET_MODE_ALIGNMENT (mode))
9865 if ((icode = optab_handler (movmisalign_optab, mode))
9866 != CODE_FOR_nothing)
9868 struct expand_operand ops[2];
9870 /* We've already validated the memory, and we're creating a
9871 new pseudo destination. The predicates really can't fail,
9872 nor can the generator. */
9873 create_output_operand (&ops[0], NULL_RTX, mode);
9874 create_fixed_operand (&ops[1], temp);
9875 expand_insn (icode, 2, ops);
9876 temp = ops[0].value;
9878 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9879 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9880 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9881 (modifier == EXPAND_STACK_PARM
9882 ? NULL_RTX : target),
9883 mode, mode, false);
9885 if (reverse
9886 && modifier != EXPAND_MEMORY
9887 && modifier != EXPAND_WRITE)
9888 temp = flip_storage_order (mode, temp);
9889 return temp;
9892 case ARRAY_REF:
9895 tree array = treeop0;
9896 tree index = treeop1;
9897 tree init;
9899 /* Fold an expression like: "foo"[2].
9900 This is not done in fold so it won't happen inside &.
9901 Don't fold if this is for wide characters since it's too
9902 difficult to do correctly and this is a very rare case. */
9904 if (modifier != EXPAND_CONST_ADDRESS
9905 && modifier != EXPAND_INITIALIZER
9906 && modifier != EXPAND_MEMORY)
9908 tree t = fold_read_from_constant_string (exp);
9910 if (t)
9911 return expand_expr (t, target, tmode, modifier);
9914 /* If this is a constant index into a constant array,
9915 just get the value from the array. Handle both the cases when
9916 we have an explicit constructor and when our operand is a variable
9917 that was declared const. */
9919 if (modifier != EXPAND_CONST_ADDRESS
9920 && modifier != EXPAND_INITIALIZER
9921 && modifier != EXPAND_MEMORY
9922 && TREE_CODE (array) == CONSTRUCTOR
9923 && ! TREE_SIDE_EFFECTS (array)
9924 && TREE_CODE (index) == INTEGER_CST)
9926 unsigned HOST_WIDE_INT ix;
9927 tree field, value;
9929 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9930 field, value)
9931 if (tree_int_cst_equal (field, index))
9933 if (!TREE_SIDE_EFFECTS (value))
9934 return expand_expr (fold (value), target, tmode, modifier);
9935 break;
9939 else if (optimize >= 1
9940 && modifier != EXPAND_CONST_ADDRESS
9941 && modifier != EXPAND_INITIALIZER
9942 && modifier != EXPAND_MEMORY
9943 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9944 && TREE_CODE (index) == INTEGER_CST
9945 && (TREE_CODE (array) == VAR_DECL
9946 || TREE_CODE (array) == CONST_DECL)
9947 && (init = ctor_for_folding (array)) != error_mark_node)
9949 if (init == NULL_TREE)
9951 tree value = build_zero_cst (type);
9952 if (TREE_CODE (value) == CONSTRUCTOR)
9954 /* If VALUE is a CONSTRUCTOR, this optimization is only
9955 useful if this doesn't store the CONSTRUCTOR into
9956 memory. If it does, it is more efficient to just
9957 load the data from the array directly. */
9958 rtx ret = expand_constructor (value, target,
9959 modifier, true);
9960 if (ret == NULL_RTX)
9961 value = NULL_TREE;
9964 if (value)
9965 return expand_expr (value, target, tmode, modifier);
9967 else if (TREE_CODE (init) == CONSTRUCTOR)
9969 unsigned HOST_WIDE_INT ix;
9970 tree field, value;
9972 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9973 field, value)
9974 if (tree_int_cst_equal (field, index))
9976 if (TREE_SIDE_EFFECTS (value))
9977 break;
9979 if (TREE_CODE (value) == CONSTRUCTOR)
9981 /* If VALUE is a CONSTRUCTOR, this
9982 optimization is only useful if
9983 this doesn't store the CONSTRUCTOR
9984 into memory. If it does, it is more
9985 efficient to just load the data from
9986 the array directly. */
9987 rtx ret = expand_constructor (value, target,
9988 modifier, true);
9989 if (ret == NULL_RTX)
9990 break;
9993 return
9994 expand_expr (fold (value), target, tmode, modifier);
9997 else if (TREE_CODE (init) == STRING_CST)
9999 tree low_bound = array_ref_low_bound (exp);
10000 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10002 /* Optimize the special case of a zero lower bound.
10004 We convert the lower bound to sizetype to avoid problems
10005 with constant folding. E.g. suppose the lower bound is
10006 1 and its mode is QI. Without the conversion
10007 (ARRAY + (INDEX - (unsigned char)1))
10008 becomes
10009 (ARRAY + (-(unsigned char)1) + INDEX)
10010 which becomes
10011 (ARRAY + 255 + INDEX). Oops! */
10012 if (!integer_zerop (low_bound))
10013 index1 = size_diffop_loc (loc, index1,
10014 fold_convert_loc (loc, sizetype,
10015 low_bound));
10017 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10019 tree type = TREE_TYPE (TREE_TYPE (init));
10020 machine_mode mode = TYPE_MODE (type);
10022 if (GET_MODE_CLASS (mode) == MODE_INT
10023 && GET_MODE_SIZE (mode) == 1)
10024 return gen_int_mode (TREE_STRING_POINTER (init)
10025 [TREE_INT_CST_LOW (index1)],
10026 mode);
10031 goto normal_inner_ref;
10033 case COMPONENT_REF:
10034 /* If the operand is a CONSTRUCTOR, we can just extract the
10035 appropriate field if it is present. */
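      /* For a bit-field member the CONSTRUCTOR may record a value with
         more bits than the field can hold, so the value extracted below
         is truncated by hand: masked for unsigned fields, or shifted up
         and back down to sign-extend for signed ones (e.g. a 3-bit
         signed field holding 7 must read back as -1).  */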
10036 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10038 unsigned HOST_WIDE_INT idx;
10039 tree field, value;
10041 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10042 idx, field, value)
10043 if (field == treeop1
10044 /* We can normally use the value of the field in the
10045 CONSTRUCTOR. However, if this is a bitfield in
10046 an integral mode that we can fit in a HOST_WIDE_INT,
10047 we must mask only the number of bits in the bitfield,
10048 since this is done implicitly by the constructor. If
10049 the bitfield does not meet either of those conditions,
10050 we can't do this optimization. */
10051 && (! DECL_BIT_FIELD (field)
10052 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10053 && (GET_MODE_PRECISION (DECL_MODE (field))
10054 <= HOST_BITS_PER_WIDE_INT))))
10056 if (DECL_BIT_FIELD (field)
10057 && modifier == EXPAND_STACK_PARM)
10058 target = 0;
10059 op0 = expand_expr (value, target, tmode, modifier);
10060 if (DECL_BIT_FIELD (field))
10062 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10063 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10065 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10067 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10068 imode);
10069 op0 = expand_and (imode, op0, op1, target);
10071 else
10073 int count = GET_MODE_PRECISION (imode) - bitsize;
10075 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10076 target, 0);
10077 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10078 target, 0);
10082 return op0;
10085 goto normal_inner_ref;
10087 case BIT_FIELD_REF:
10088 case ARRAY_RANGE_REF:
10089 normal_inner_ref:
10091 machine_mode mode1, mode2;
10092 HOST_WIDE_INT bitsize, bitpos;
10093 tree offset;
10094 int reversep, volatilep = 0, must_force_mem;
10095 tree tem
10096 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10097 &unsignedp, &reversep, &volatilep, true);
10098 rtx orig_op0, memloc;
10099 bool clear_mem_expr = false;
10101 /* If we got back the original object, something is wrong. Perhaps
10102 we are evaluating an expression too early. In any event, don't
10103 infinitely recurse. */
10104 gcc_assert (tem != exp);
10106 /* If TEM's type is a union of variable size, pass TARGET to the inner
10107 computation, since it will need a temporary and TARGET is known
10108 to have to do. This occurs in unchecked conversion in Ada. */
10109 orig_op0 = op0
10110 = expand_expr_real (tem,
10111 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10112 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10113 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10114 != INTEGER_CST)
10115 && modifier != EXPAND_STACK_PARM
10116 ? target : NULL_RTX),
10117 VOIDmode,
10118 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10119 NULL, true);
10121 /* If the field has a mode, we want to access it in the
10122 field's mode, not the computed mode.
10123 If a MEM has VOIDmode (external with incomplete type),
10124 use BLKmode for it instead. */
10125 if (MEM_P (op0))
10127 if (mode1 != VOIDmode)
10128 op0 = adjust_address (op0, mode1, 0);
10129 else if (GET_MODE (op0) == VOIDmode)
10130 op0 = adjust_address (op0, BLKmode, 0);
10133 mode2
10134 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10136 /* If we have either an offset, a BLKmode result, or a reference
10137 outside the underlying object, we must force it to memory.
10138 Such a case can occur in Ada if we have unchecked conversion
10139 of an expression from a scalar type to an aggregate type or
10140 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10141 passed a partially uninitialized object or a view-conversion
10142 to a larger size. */
10143 must_force_mem = (offset
10144 || mode1 == BLKmode
10145 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10147 /* Handle CONCAT first. */
10148 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10150 if (bitpos == 0
10151 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10153 if (reversep)
10154 op0 = flip_storage_order (GET_MODE (op0), op0);
10155 return op0;
10157 if (bitpos == 0
10158 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10159 && bitsize)
10161 op0 = XEXP (op0, 0);
10162 mode2 = GET_MODE (op0);
10164 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10165 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10166 && bitpos
10167 && bitsize)
10169 op0 = XEXP (op0, 1);
10170 bitpos = 0;
10171 mode2 = GET_MODE (op0);
10173 else
10174 /* Otherwise force into memory. */
10175 must_force_mem = 1;
10178 /* If this is a constant, put it in a register if it is a legitimate
10179 constant and we don't need a memory reference. */
10180 if (CONSTANT_P (op0)
10181 && mode2 != BLKmode
10182 && targetm.legitimate_constant_p (mode2, op0)
10183 && !must_force_mem)
10184 op0 = force_reg (mode2, op0);
10186 /* Otherwise, if this is a constant, try to force it to the constant
10187 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10188 is a legitimate constant. */
10189 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10190 op0 = validize_mem (memloc);
10192 /* Otherwise, if this is a constant or the object is not in memory
10193 and need be, put it there. */
10194 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10196 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10197 emit_move_insn (memloc, op0);
10198 op0 = memloc;
10199 clear_mem_expr = true;
10202 if (offset)
10204 machine_mode address_mode;
10205 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10206 EXPAND_SUM);
10208 gcc_assert (MEM_P (op0));
10210 address_mode = get_address_mode (op0);
10211 if (GET_MODE (offset_rtx) != address_mode)
10213 /* We cannot be sure that the RTL in offset_rtx is valid outside
10214 of a memory address context, so force it into a register
10215 before attempting to convert it to the desired mode. */
10216 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10217 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10220 /* See the comment in expand_assignment for the rationale. */
10221 if (mode1 != VOIDmode
10222 && bitpos != 0
10223 && bitsize > 0
10224 && (bitpos % bitsize) == 0
10225 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10226 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10228 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10229 bitpos = 0;
10232 op0 = offset_address (op0, offset_rtx,
10233 highest_pow2_factor (offset));
10236 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10237 record its alignment as BIGGEST_ALIGNMENT. */
10238 if (MEM_P (op0) && bitpos == 0 && offset != 0
10239 && is_aligning_offset (offset, tem))
10240 set_mem_align (op0, BIGGEST_ALIGNMENT);
10242 /* Don't forget about volatility even if this is a bitfield. */
10243 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10245 if (op0 == orig_op0)
10246 op0 = copy_rtx (op0);
10248 MEM_VOLATILE_P (op0) = 1;
10251 /* In cases where an aligned union has an unaligned object
10252 as a field, we might be extracting a BLKmode value from
10253 an integer-mode (e.g., SImode) object. Handle this case
10254 by doing the extract into an object as wide as the field
10255 (which we know to be the width of a basic mode), then
10256 storing into memory, and changing the mode to BLKmode. */
10257 if (mode1 == VOIDmode
10258 || REG_P (op0) || GET_CODE (op0) == SUBREG
10259 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10260 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10261 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10262 && modifier != EXPAND_CONST_ADDRESS
10263 && modifier != EXPAND_INITIALIZER
10264 && modifier != EXPAND_MEMORY)
10265 /* If the bitfield is volatile and the bitsize
10266 is narrower than the access size of the bitfield,
10267 we need to extract bitfields from the access. */
10268 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10269 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10270 && mode1 != BLKmode
10271 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10272 /* If the field isn't aligned enough to fetch as a memref,
10273 fetch it as a bit field. */
10274 || (mode1 != BLKmode
10275 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10276 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10277 || (MEM_P (op0)
10278 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10279 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10280 && modifier != EXPAND_MEMORY
10281 && ((modifier == EXPAND_CONST_ADDRESS
10282 || modifier == EXPAND_INITIALIZER)
10283 ? STRICT_ALIGNMENT
10284 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10285 || (bitpos % BITS_PER_UNIT != 0)))
10286 /* If the type and the field are a constant size and the
10287 size of the type isn't the same size as the bitfield,
10288 we must use bitfield operations. */
10289 || (bitsize >= 0
10290 && TYPE_SIZE (TREE_TYPE (exp))
10291 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10292 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10293 bitsize)))
10295 machine_mode ext_mode = mode;
10297 if (ext_mode == BLKmode
10298 && ! (target != 0 && MEM_P (op0)
10299 && MEM_P (target)
10300 && bitpos % BITS_PER_UNIT == 0))
10301 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10303 if (ext_mode == BLKmode)
10305 if (target == 0)
10306 target = assign_temp (type, 1, 1);
10308 /* ??? Unlike the similar test a few lines below, this one is
10309 very likely obsolete. */
10310 if (bitsize == 0)
10311 return target;
10313 /* In this case, BITPOS must start at a byte boundary and
10314 TARGET, if specified, must be a MEM. */
10315 gcc_assert (MEM_P (op0)
10316 && (!target || MEM_P (target))
10317 && !(bitpos % BITS_PER_UNIT));
10319 emit_block_move (target,
10320 adjust_address (op0, VOIDmode,
10321 bitpos / BITS_PER_UNIT),
10322 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10323 / BITS_PER_UNIT),
10324 (modifier == EXPAND_STACK_PARM
10325 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10327 return target;
10330 /* If we have nothing to extract, the result will be 0 for targets
10331 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10332 return 0 for the sake of consistency, as reading a zero-sized
10333 bitfield is valid in Ada and the value is fully specified. */
10334 if (bitsize == 0)
10335 return const0_rtx;
10337 op0 = validize_mem (op0);
10339 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10340 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10342 /* If the result has a record type and the extraction is done in
10343 an integral mode, then the field may be not aligned on a byte
10344 boundary; in this case, if it has reverse storage order, it
10345 needs to be extracted as a scalar field with reverse storage
10346 order and put back into memory order afterwards. */
10347 if (TREE_CODE (type) == RECORD_TYPE
10348 && GET_MODE_CLASS (ext_mode) == MODE_INT)
10349 reversep = TYPE_REVERSE_STORAGE_ORDER (type);
10351 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10352 (modifier == EXPAND_STACK_PARM
10353 ? NULL_RTX : target),
10354 ext_mode, ext_mode, reversep);
10356 /* If the result has a record type and the mode of OP0 is an
10357 integral mode then, if BITSIZE is narrower than this mode
10358 and this is for big-endian data, we must put the field
10359 into the high-order bits. And we must also put it back
10360 into memory order if it has been previously reversed. */
10361 if (TREE_CODE (type) == RECORD_TYPE
10362 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
10364 HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (op0));
10366 if (bitsize < size
10367 && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
10368 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10369 size - bitsize, op0, 1);
10371 if (reversep)
10372 op0 = flip_storage_order (GET_MODE (op0), op0);
10375 /* If the result type is BLKmode, store the data into a temporary
10376 of the appropriate type, but with the mode corresponding to the
10377 mode for the data we have (op0's mode). */
10378 if (mode == BLKmode)
10380 rtx new_rtx
10381 = assign_stack_temp_for_type (ext_mode,
10382 GET_MODE_BITSIZE (ext_mode),
10383 type);
10384 emit_move_insn (new_rtx, op0);
10385 op0 = copy_rtx (new_rtx);
10386 PUT_MODE (op0, BLKmode);
10389 return op0;
10392 /* If the result is BLKmode, use that to access the object
10393 now as well. */
10394 if (mode == BLKmode)
10395 mode1 = BLKmode;
10397 /* Get a reference to just this component. */
10398 if (modifier == EXPAND_CONST_ADDRESS
10399 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10400 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10401 else
10402 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10404 if (op0 == orig_op0)
10405 op0 = copy_rtx (op0);
10407 /* Don't set memory attributes if the base expression is
10408 SSA_NAME that got expanded as a MEM. In that case, we should
10409 just honor its original memory attributes. */
10410 if (TREE_CODE (tem) != SSA_NAME || !MEM_P (orig_op0))
10411 set_mem_attributes (op0, exp, 0);
10413 if (REG_P (XEXP (op0, 0)))
10414 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10416 /* If op0 is a temporary because the original expression was forced
10417 to memory, clear MEM_EXPR so that the original expression cannot
10418 be marked as addressable through MEM_EXPR of the temporary. */
10419 if (clear_mem_expr)
10420 set_mem_expr (op0, NULL_TREE);
10422 MEM_VOLATILE_P (op0) |= volatilep;
10424 if (reversep
10425 && modifier != EXPAND_MEMORY
10426 && modifier != EXPAND_WRITE)
10427 op0 = flip_storage_order (mode1, op0);
10429 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10430 || modifier == EXPAND_CONST_ADDRESS
10431 || modifier == EXPAND_INITIALIZER)
10432 return op0;
10434 if (target == 0)
10435 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10437 convert_move (target, op0, unsignedp);
10438 return target;
10441 case OBJ_TYPE_REF:
10442 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10444 case CALL_EXPR:
10445 /* All valid uses of __builtin_va_arg_pack () are removed during
10446 inlining. */
10447 if (CALL_EXPR_VA_ARG_PACK (exp))
10448 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10450 tree fndecl = get_callee_fndecl (exp), attr;
10452 if (fndecl
10453 && (attr = lookup_attribute ("error",
10454 DECL_ATTRIBUTES (fndecl))) != NULL)
10455 error ("%Kcall to %qs declared with attribute error: %s",
10456 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10457 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10458 if (fndecl
10459 && (attr = lookup_attribute ("warning",
10460 DECL_ATTRIBUTES (fndecl))) != NULL)
10461 warning_at (tree_nonartificial_location (exp),
10462 0, "%Kcall to %qs declared with attribute warning: %s",
10463 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10464 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10466 /* Check for a built-in function. */
10467 if (fndecl && DECL_BUILT_IN (fndecl))
10469 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10470 if (CALL_WITH_BOUNDS_P (exp))
10471 return expand_builtin_with_bounds (exp, target, subtarget,
10472 tmode, ignore);
10473 else
10474 return expand_builtin (exp, target, subtarget, tmode, ignore);
10477 return expand_call (exp, target, ignore);
10479 case VIEW_CONVERT_EXPR:
10480 op0 = NULL_RTX;
10482 /* If we are converting to BLKmode, try to avoid an intermediate
10483 temporary by fetching an inner memory reference. */
10484 if (mode == BLKmode
10485 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10486 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10487 && handled_component_p (treeop0))
10489 machine_mode mode1;
10490 HOST_WIDE_INT bitsize, bitpos;
10491 tree offset;
10492 int unsignedp, reversep, volatilep = 0;
10493 tree tem
10494 = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
10495 &unsignedp, &reversep, &volatilep, true);
10496 rtx orig_op0;
10498 /* ??? We should work harder and deal with non-zero offsets. */
10499 if (!offset
10500 && (bitpos % BITS_PER_UNIT) == 0
10501 && !reversep
10502 && bitsize >= 0
10503 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10505 /* See the normal_inner_ref case for the rationale. */
10506 orig_op0
10507 = expand_expr_real (tem,
10508 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10509 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10510 != INTEGER_CST)
10511 && modifier != EXPAND_STACK_PARM
10512 ? target : NULL_RTX),
10513 VOIDmode,
10514 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10515 NULL, true);
10517 if (MEM_P (orig_op0))
10519 op0 = orig_op0;
10521 /* Get a reference to just this component. */
10522 if (modifier == EXPAND_CONST_ADDRESS
10523 || modifier == EXPAND_SUM
10524 || modifier == EXPAND_INITIALIZER)
10525 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10526 else
10527 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10529 if (op0 == orig_op0)
10530 op0 = copy_rtx (op0);
10532 set_mem_attributes (op0, treeop0, 0);
10533 if (REG_P (XEXP (op0, 0)))
10534 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10536 MEM_VOLATILE_P (op0) |= volatilep;
10541 if (!op0)
10542 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10543 NULL, inner_reference_p);
10545 /* If the input and output modes are both the same, we are done. */
10546 if (mode == GET_MODE (op0))
10548 /* If neither mode is BLKmode, and both modes are the same size
10549 then we can use gen_lowpart. */
10550 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10551 && (GET_MODE_PRECISION (mode)
10552 == GET_MODE_PRECISION (GET_MODE (op0)))
10553 && !COMPLEX_MODE_P (GET_MODE (op0)))
10555 if (GET_CODE (op0) == SUBREG)
10556 op0 = force_reg (GET_MODE (op0), op0);
10557 temp = gen_lowpart_common (mode, op0);
10558 if (temp)
10559 op0 = temp;
10560 else
10562 if (!REG_P (op0) && !MEM_P (op0))
10563 op0 = force_reg (GET_MODE (op0), op0);
10564 op0 = gen_lowpart (mode, op0);
10567 /* If both types are integral, convert from one mode to the other. */
10568 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10569 op0 = convert_modes (mode, GET_MODE (op0), op0,
10570 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10571 /* If the output type is a bit-field type, do an extraction. */
10572 else if (reduce_bit_field)
10573 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10574 TYPE_UNSIGNED (type), NULL_RTX,
10575 mode, mode, false);
10576 /* As a last resort, spill op0 to memory, and reload it in a
10577 different mode. */
10578 else if (!MEM_P (op0))
10580 /* If the operand is not a MEM, force it into memory. Since we
10581 are going to be changing the mode of the MEM, don't call
10582 force_const_mem for constants because we don't allow pool
10583 constants to change mode. */
10584 tree inner_type = TREE_TYPE (treeop0);
10586 gcc_assert (!TREE_ADDRESSABLE (exp));
10588 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10589 target
10590 = assign_stack_temp_for_type
10591 (TYPE_MODE (inner_type),
10592 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10594 emit_move_insn (target, op0);
10595 op0 = target;
10598 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10599 output type is such that the operand is known to be aligned, indicate
10600 that it is. Otherwise, we need only be concerned about alignment for
10601 non-BLKmode results. */
10602 if (MEM_P (op0))
10604 enum insn_code icode;
10606 if (TYPE_ALIGN_OK (type))
10608 /* ??? Copying the MEM without substantially changing it might
10609 run afoul of the code handling volatile memory references in
10610 store_expr, which assumes that TARGET is returned unmodified
10611 if it has been used. */
10612 op0 = copy_rtx (op0);
10613 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10615 else if (modifier != EXPAND_WRITE
10616 && modifier != EXPAND_MEMORY
10617 && !inner_reference_p
10618 && mode != BLKmode
10619 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10621 /* If the target does have special handling for unaligned
10622 loads of mode then use them. */
10623 if ((icode = optab_handler (movmisalign_optab, mode))
10624 != CODE_FOR_nothing)
10626 rtx reg;
10628 op0 = adjust_address (op0, mode, 0);
10629 /* We've already validated the memory, and we're creating a
10630 new pseudo destination. The predicates really can't
10631 fail. */
10632 reg = gen_reg_rtx (mode);
10634 /* Nor can the insn generator. */
10635 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
10636 emit_insn (insn);
10637 return reg;
10639 else if (STRICT_ALIGNMENT)
10641 tree inner_type = TREE_TYPE (treeop0);
10642 HOST_WIDE_INT temp_size
10643 = MAX (int_size_in_bytes (inner_type),
10644 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10645 rtx new_rtx
10646 = assign_stack_temp_for_type (mode, temp_size, type);
10647 rtx new_with_op0_mode
10648 = adjust_address (new_rtx, GET_MODE (op0), 0);
10650 gcc_assert (!TREE_ADDRESSABLE (exp));
10652 if (GET_MODE (op0) == BLKmode)
10653 emit_block_move (new_with_op0_mode, op0,
10654 GEN_INT (GET_MODE_SIZE (mode)),
10655 (modifier == EXPAND_STACK_PARM
10656 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10657 else
10658 emit_move_insn (new_with_op0_mode, op0);
10660 op0 = new_rtx;
10664 op0 = adjust_address (op0, mode, 0);
10667 return op0;
10669 case MODIFY_EXPR:
10671 tree lhs = treeop0;
10672 tree rhs = treeop1;
10673 gcc_assert (ignore);
10675 /* Check for |= or &= of a bitfield of size one into another bitfield
10676 of size 1. In this case, (unless we need the result of the
10677 assignment) we can do this more efficiently with a
10678 test followed by an assignment, if necessary.
10680 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10681 things change so we do, this code should be enhanced to
10682 support it. */
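        /* E.g. for one-bit bit-fields a and b in the same struct,
           "s.a |= s.b;" is expanded as roughly "if (s.b) s.a = 1;", so
           the destination is only written when the source bit is set.  */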
10683 if (TREE_CODE (lhs) == COMPONENT_REF
10684 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10685 || TREE_CODE (rhs) == BIT_AND_EXPR)
10686 && TREE_OPERAND (rhs, 0) == lhs
10687 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10688 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10689 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10691 rtx_code_label *label = gen_label_rtx ();
10692 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10693 do_jump (TREE_OPERAND (rhs, 1),
10694 value ? label : 0,
10695 value ? 0 : label, -1);
10696 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10697 false);
10698 do_pending_stack_adjust ();
10699 emit_label (label);
10700 return const0_rtx;
10703 expand_assignment (lhs, rhs, false);
10704 return const0_rtx;
10707 case ADDR_EXPR:
10708 return expand_expr_addr_expr (exp, target, tmode, modifier);
10710 case REALPART_EXPR:
10711 op0 = expand_normal (treeop0);
10712 return read_complex_part (op0, false);
10714 case IMAGPART_EXPR:
10715 op0 = expand_normal (treeop0);
10716 return read_complex_part (op0, true);
10718 case RETURN_EXPR:
10719 case LABEL_EXPR:
10720 case GOTO_EXPR:
10721 case SWITCH_EXPR:
10722 case ASM_EXPR:
10723 /* Expanded in cfgexpand.c. */
10724 gcc_unreachable ();
10726 case TRY_CATCH_EXPR:
10727 case CATCH_EXPR:
10728 case EH_FILTER_EXPR:
10729 case TRY_FINALLY_EXPR:
10730 /* Lowered by tree-eh.c. */
10731 gcc_unreachable ();
10733 case WITH_CLEANUP_EXPR:
10734 case CLEANUP_POINT_EXPR:
10735 case TARGET_EXPR:
10736 case CASE_LABEL_EXPR:
10737 case VA_ARG_EXPR:
10738 case BIND_EXPR:
10739 case INIT_EXPR:
10740 case CONJ_EXPR:
10741 case COMPOUND_EXPR:
10742 case PREINCREMENT_EXPR:
10743 case PREDECREMENT_EXPR:
10744 case POSTINCREMENT_EXPR:
10745 case POSTDECREMENT_EXPR:
10746 case LOOP_EXPR:
10747 case EXIT_EXPR:
10748 case COMPOUND_LITERAL_EXPR:
10749 /* Lowered by gimplify.c. */
10750 gcc_unreachable ();
10752 case FDESC_EXPR:
10753 /* Function descriptors are not valid except as
10754 initialization constants, and should not be expanded. */
10755 gcc_unreachable ();
10757 case WITH_SIZE_EXPR:
10758 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10759 have pulled out the size to use in whatever context it needed. */
10760 return expand_expr_real (treeop0, original_target, tmode,
10761 modifier, alt_rtl, inner_reference_p);
10763 default:
10764 return expand_expr_real_2 (&ops, target, tmode, modifier);
10768 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10769 signedness of TYPE), possibly returning the result in TARGET. */
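/* E.g. for a 3-bit unsigned TYPE held in SImode this masks EXP with 7,
   while for a 3-bit signed TYPE it shifts left and then arithmetically
   right by 29 bits (assuming 32-bit SImode), so a stored value of 7
   reads back as -1.  */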
10770 static rtx
10771 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10773 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10774 if (target && GET_MODE (target) != GET_MODE (exp))
10775 target = 0;
10776 /* For constant values, reduce using build_int_cst_type. */
10777 if (CONST_INT_P (exp))
10779 HOST_WIDE_INT value = INTVAL (exp);
10780 tree t = build_int_cst_type (type, value);
10781 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10783 else if (TYPE_UNSIGNED (type))
10785 machine_mode mode = GET_MODE (exp);
10786 rtx mask = immed_wide_int_const
10787 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10788 return expand_and (mode, exp, mask, target);
10790 else
10792 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10793 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10794 exp, count, target, 0);
10795 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10796 exp, count, target, 0);
10800 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10801 when applied to the address of EXP produces an address known to be
10802 aligned more than BIGGEST_ALIGNMENT. */
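/* The recognized shape is essentially
     (- (sizetype) &EXP) & (ALIGN - 1)
   for some power of two ALIGN larger than BIGGEST_ALIGNMENT in bytes;
   adding such an offset to the address of EXP rounds it up to an
   ALIGN-byte boundary.  */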
10804 static int
10805 is_aligning_offset (const_tree offset, const_tree exp)
10807 /* Strip off any conversions. */
10808 while (CONVERT_EXPR_P (offset))
10809 offset = TREE_OPERAND (offset, 0);
10811 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10812 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10813 if (TREE_CODE (offset) != BIT_AND_EXPR
10814 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10815 || compare_tree_int (TREE_OPERAND (offset, 1),
10816 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10817 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10818 return 0;
10820 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10821 It must be NEGATE_EXPR. Then strip any more conversions. */
10822 offset = TREE_OPERAND (offset, 0);
10823 while (CONVERT_EXPR_P (offset))
10824 offset = TREE_OPERAND (offset, 0);
10826 if (TREE_CODE (offset) != NEGATE_EXPR)
10827 return 0;
10829 offset = TREE_OPERAND (offset, 0);
10830 while (CONVERT_EXPR_P (offset))
10831 offset = TREE_OPERAND (offset, 0);
10833 /* This must now be the address of EXP. */
10834 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10837 /* Return the tree node if ARG corresponds to a string constant, or zero
10838 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10839 in bytes within the string that ARG is accessing. The type of the
10840 offset will be `sizetype'. */
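/* E.g. given static const char buf[] = "abc", a call with ARG being
   &buf[1] (or buf + 1) returns the STRING_CST "abc" with *PTR_OFFSET
   set to 1, while zero is returned for anything that cannot be resolved
   to a string literal plus a known, in-range offset.  */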
10842 tree
10843 string_constant (tree arg, tree *ptr_offset)
10845 tree array, offset, lower_bound;
10846 STRIP_NOPS (arg);
10848 if (TREE_CODE (arg) == ADDR_EXPR)
10850 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10852 *ptr_offset = size_zero_node;
10853 return TREE_OPERAND (arg, 0);
10855 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10857 array = TREE_OPERAND (arg, 0);
10858 offset = size_zero_node;
10860 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10862 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10863 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10864 if (TREE_CODE (array) != STRING_CST
10865 && TREE_CODE (array) != VAR_DECL)
10866 return 0;
10868 /* Check if the array has a nonzero lower bound. */
10869 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10870 if (!integer_zerop (lower_bound))
10872 /* If the offset and base aren't both constants, return 0. */
10873 if (TREE_CODE (lower_bound) != INTEGER_CST)
10874 return 0;
10875 if (TREE_CODE (offset) != INTEGER_CST)
10876 return 0;
10877 /* Adjust offset by the lower bound. */
10878 offset = size_diffop (fold_convert (sizetype, offset),
10879 fold_convert (sizetype, lower_bound));
10882 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10884 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10885 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10886 if (TREE_CODE (array) != ADDR_EXPR)
10887 return 0;
10888 array = TREE_OPERAND (array, 0);
10889 if (TREE_CODE (array) != STRING_CST
10890 && TREE_CODE (array) != VAR_DECL)
10891 return 0;
10893 else
10894 return 0;
10896 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10898 tree arg0 = TREE_OPERAND (arg, 0);
10899 tree arg1 = TREE_OPERAND (arg, 1);
10901 STRIP_NOPS (arg0);
10902 STRIP_NOPS (arg1);
10904 if (TREE_CODE (arg0) == ADDR_EXPR
10905 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10906 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10908 array = TREE_OPERAND (arg0, 0);
10909 offset = arg1;
10911 else if (TREE_CODE (arg1) == ADDR_EXPR
10912 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10913 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10915 array = TREE_OPERAND (arg1, 0);
10916 offset = arg0;
10918 else
10919 return 0;
10921 else
10922 return 0;
10924 if (TREE_CODE (array) == STRING_CST)
10926 *ptr_offset = fold_convert (sizetype, offset);
10927 return array;
10929 else if (TREE_CODE (array) == VAR_DECL
10930 || TREE_CODE (array) == CONST_DECL)
10932 int length;
10933 tree init = ctor_for_folding (array);
10935 /* Variables initialized to string literals can be handled too. */
10936 if (init == error_mark_node
10937 || !init
10938 || TREE_CODE (init) != STRING_CST)
10939 return 0;
10941 /* Avoid const char foo[4] = "abcde"; */
10942 if (DECL_SIZE_UNIT (array) == NULL_TREE
10943 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10944 || (length = TREE_STRING_LENGTH (init)) <= 0
10945 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10946 return 0;
10948 /* If the variable is bigger than the string literal, OFFSET must be
10949 constant and within the bounds of the string literal. */
10950 offset = fold_convert (sizetype, offset);
10951 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10952 && (! tree_fits_uhwi_p (offset)
10953 || compare_tree_int (offset, length) >= 0))
10954 return 0;
10956 *ptr_offset = offset;
10957 return init;
10960 return 0;
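/* Editor's sketch: a minimal, hypothetical caller of string_constant,
   along the lines of what the builtin expanders do.  For an argument
   such as &"hello"[2] it returns the STRING_CST for "hello" with
   *PTR_OFFSET set to 2; the helper name below is made up for
   illustration.  */

static const char *
c_string_at (tree arg)
{
  tree offset;
  tree str = string_constant (arg, &offset);

  if (str == NULL_TREE || !tree_fits_uhwi_p (offset))
    return NULL;

  unsigned HOST_WIDE_INT off = tree_to_uhwi (offset);
  if (off >= (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (str))
    return NULL;

  /* TREE_STRING_POINTER gives the literal's bytes; index by the byte
     offset recovered above.  */
  return TREE_STRING_POINTER (str) + off;
}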
10963 /* Generate code to calculate OPS, an exploded comparison expression,
10964 using a store-flag instruction, and return an rtx for the result.
10965 OPS reflects a comparison.
10967 If TARGET is nonzero, store the result there if convenient.
10969 Return zero if there is no suitable set-flag instruction
10970 available on this machine.
10972 Once expand_expr has been called on the arguments of the comparison,
10973 we are committed to doing the store flag, since it is not safe to
10974 re-evaluate the expression. We emit the store-flag insn by calling
10975 emit_store_flag, but only expand the arguments if we have a reason
10976 to believe that emit_store_flag will be successful. If we think that
10977 it will, but it isn't, we have to simulate the store-flag with a
10978 set/jump/set sequence. */
10980 static rtx
10981 do_store_flag (sepops ops, rtx target, machine_mode mode)
10983 enum rtx_code code;
10984 tree arg0, arg1, type;
10985 machine_mode operand_mode;
10986 int unsignedp;
10987 rtx op0, op1;
10988 rtx subtarget = target;
10989 location_t loc = ops->location;
10991 arg0 = ops->op0;
10992 arg1 = ops->op1;
10994 /* Don't crash if the comparison was erroneous. */
10995 if (arg0 == error_mark_node || arg1 == error_mark_node)
10996 return const0_rtx;
10998 type = TREE_TYPE (arg0);
10999 operand_mode = TYPE_MODE (type);
11000 unsignedp = TYPE_UNSIGNED (type);
11002 /* We won't bother with BLKmode store-flag operations because it would mean
11003 passing a lot of information to emit_store_flag. */
11004 if (operand_mode == BLKmode)
11005 return 0;
11007 /* We won't bother with store-flag operations involving function pointers
11008 when function pointers must be canonicalized before comparisons. */
11009 if (targetm.have_canonicalize_funcptr_for_compare ()
11010 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11011 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11012 == FUNCTION_TYPE))
11013 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11014 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11015 == FUNCTION_TYPE))))
11016 return 0;
11018 STRIP_NOPS (arg0);
11019 STRIP_NOPS (arg1);
11021 /* For vector typed comparisons emit code to generate the desired
11022 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11023 expander for this. */
11024 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11026 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11027 if (VECTOR_BOOLEAN_TYPE_P (ops->type)
11028 && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type))
11029 return expand_vec_cmp_expr (ops->type, ifexp, target);
11030 else
11032 tree if_true = constant_boolean_node (true, ops->type);
11033 tree if_false = constant_boolean_node (false, ops->type);
11034 return expand_vec_cond_expr (ops->type, ifexp, if_true,
11035 if_false, target);
11039 /* Get the rtx comparison code to use. We know that OPS describes a
11040 comparison operation of some type. Some comparisons against 1 and -1 can be
11041 converted to comparisons with zero. Do so here so that the tests
11042 below will be aware that we have a comparison with zero. These
11043 tests will not catch constants in the first operand, but constants
11044 are rarely passed as the first operand. */
11046 switch (ops->code)
11048 case EQ_EXPR:
11049 code = EQ;
11050 break;
11051 case NE_EXPR:
11052 code = NE;
11053 break;
11054 case LT_EXPR:
11055 if (integer_onep (arg1))
11056 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11057 else
11058 code = unsignedp ? LTU : LT;
11059 break;
11060 case LE_EXPR:
11061 if (! unsignedp && integer_all_onesp (arg1))
11062 arg1 = integer_zero_node, code = LT;
11063 else
11064 code = unsignedp ? LEU : LE;
11065 break;
11066 case GT_EXPR:
11067 if (! unsignedp && integer_all_onesp (arg1))
11068 arg1 = integer_zero_node, code = GE;
11069 else
11070 code = unsignedp ? GTU : GT;
11071 break;
11072 case GE_EXPR:
11073 if (integer_onep (arg1))
11074 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11075 else
11076 code = unsignedp ? GEU : GE;
11077 break;
11079 case UNORDERED_EXPR:
11080 code = UNORDERED;
11081 break;
11082 case ORDERED_EXPR:
11083 code = ORDERED;
11084 break;
11085 case UNLT_EXPR:
11086 code = UNLT;
11087 break;
11088 case UNLE_EXPR:
11089 code = UNLE;
11090 break;
11091 case UNGT_EXPR:
11092 code = UNGT;
11093 break;
11094 case UNGE_EXPR:
11095 code = UNGE;
11096 break;
11097 case UNEQ_EXPR:
11098 code = UNEQ;
11099 break;
11100 case LTGT_EXPR:
11101 code = LTGT;
11102 break;
11104 default:
11105 gcc_unreachable ();
11108 /* Put a constant second. */
11109 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11110 || TREE_CODE (arg0) == FIXED_CST)
11112 std::swap (arg0, arg1);
11113 code = swap_condition (code);
11116 /* If this is an equality or inequality test of a single bit, we can
11117 do this by shifting the bit being tested to the low-order bit and
11118 masking the result with the constant 1. If the condition was EQ,
11119 we xor it with 1. This does not require an scc insn and is faster
11120 than an scc insn even if we have it.
11122 The code to make this transformation was moved into fold_single_bit_test,
11123 so we just call into the folder and expand its result. */
11125 if ((code == NE || code == EQ)
11126 && integer_zerop (arg1)
11127 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11129 gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11130 if (srcstmt
11131 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11133 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11134 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11135 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11136 gimple_assign_rhs1 (srcstmt),
11137 gimple_assign_rhs2 (srcstmt));
11138 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11139 if (temp)
11140 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11144 if (! get_subtarget (target)
11145 || GET_MODE (subtarget) != operand_mode)
11146 subtarget = 0;
11148 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11150 if (target == 0)
11151 target = gen_reg_rtx (mode);
11153 /* Try a cstore if possible. */
11154 return emit_store_flag_force (target, code, op0, op1,
11155 operand_mode, unsignedp,
11156 (TYPE_PRECISION (ops->type) == 1
11157 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
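/* Editor's sketch: the source-level effect of the single-bit-test path
   above, in plain C (illustration only; these helpers are not part of
   GCC).  Testing a single bit needs no store-flag instruction:  */

static unsigned
single_bit_ne (unsigned x, unsigned n)
{
  /* (x & (1u << n)) != 0  is rewritten as  (x >> n) & 1.  */
  return (x >> n) & 1u;
}

static unsigned
single_bit_eq (unsigned x, unsigned n)
{
  /* For EQ the same shifted-and-masked value is XORed with 1.  */
  return ((x >> n) & 1u) ^ 1u;
}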
11160 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11161 0 otherwise (i.e. if there is no casesi instruction).
11163 DEFAULT_PROBABILITY is the probability of jumping to the default
11164 label. */
11165 int
11166 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11167 rtx table_label, rtx default_label, rtx fallback_label,
11168 int default_probability)
11170 struct expand_operand ops[5];
11171 machine_mode index_mode = SImode;
11172 rtx op1, op2, index;
11174 if (! targetm.have_casesi ())
11175 return 0;
11177 /* Convert the index to SImode. */
11178 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11180 machine_mode omode = TYPE_MODE (index_type);
11181 rtx rangertx = expand_normal (range);
11183 /* We must handle the endpoints in the original mode. */
11184 index_expr = build2 (MINUS_EXPR, index_type,
11185 index_expr, minval);
11186 minval = integer_zero_node;
11187 index = expand_normal (index_expr);
11188 if (default_label)
11189 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11190 omode, 1, default_label,
11191 default_probability);
11192 /* Now we can safely truncate. */
11193 index = convert_to_mode (index_mode, index, 0);
11195 else
11197 if (TYPE_MODE (index_type) != index_mode)
11199 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11200 index_expr = fold_convert (index_type, index_expr);
11203 index = expand_normal (index_expr);
11206 do_pending_stack_adjust ();
11208 op1 = expand_normal (minval);
11209 op2 = expand_normal (range);
11211 create_input_operand (&ops[0], index, index_mode);
11212 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11213 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11214 create_fixed_operand (&ops[3], table_label);
11215 create_fixed_operand (&ops[4], (default_label
11216 ? default_label
11217 : fallback_label));
11218 expand_jump_insn (targetm.code_for_casesi, 5, ops);
11219 return 1;
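/* Editor's sketch: the wide-index path of try_casesi modelled in plain C,
   assuming a 64-bit index and a 32-bit casesi operand (hypothetical
   helper, illustration only).  The bounds check happens at the original
   width, after which truncation is safe; the range is assumed to fit in
   the narrow type.  */

static int
casesi_wide_index_model (unsigned long long index, unsigned long long minval,
                         unsigned long long range)
{
  unsigned long long rel = index - minval;  /* endpoints handled in the wide mode */

  if (rel > range)
    return -1;                               /* would branch to the default label */

  return (int) rel;                          /* now safe to truncate to SImode width */
}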
11222 /* Attempt to generate a tablejump instruction; same concept as try_casesi. */
11223 /* Subroutine of the next function.
11225 INDEX is the value being switched on, with the lowest value
11226 in the table already subtracted.
11227 MODE is its expected mode (needed if INDEX is constant).
11228 RANGE is the length of the jump table.
11229 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11231 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11232 index value is out of range.
11233 DEFAULT_PROBABILITY is the probability of jumping to
11234 the default label. */
11236 static void
11237 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11238 rtx default_label, int default_probability)
11240 rtx temp, vector;
11242 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11243 cfun->cfg->max_jumptable_ents = INTVAL (range);
11245 /* Do an unsigned comparison (in the proper mode) between the index
11246 expression and the value which represents the length of the range.
11247 Since we just finished subtracting the lower bound of the range
11248 from the index expression, this comparison allows us to simultaneously
11249 check that the original index expression value is both greater than
11250 or equal to the minimum value of the range and less than or equal to
11251 the maximum value of the range. */
11253 if (default_label)
11254 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11255 default_label, default_probability);
11258 /* If index is in range, it must fit in Pmode.
11259 Convert to Pmode so we can index with it. */
11260 if (mode != Pmode)
11261 index = convert_to_mode (Pmode, index, 1);
11263 /* Don't let a MEM slip through, because then INDEX that comes
11264 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11265 and break_out_memory_refs will go to work on it and mess it up. */
11266 #ifdef PIC_CASE_VECTOR_ADDRESS
11267 if (flag_pic && !REG_P (index))
11268 index = copy_to_mode_reg (Pmode, index);
11269 #endif
11271 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11272 GET_MODE_SIZE, because this indicates how large insns are. The other
11273 uses should all be Pmode, because they are addresses. This code
11274 could fail if addresses and insns are not the same size. */
11275 index = simplify_gen_binary (MULT, Pmode, index,
11276 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11277 Pmode));
11278 index = simplify_gen_binary (PLUS, Pmode, index,
11279 gen_rtx_LABEL_REF (Pmode, table_label));
11281 #ifdef PIC_CASE_VECTOR_ADDRESS
11282 if (flag_pic)
11283 index = PIC_CASE_VECTOR_ADDRESS (index);
11284 else
11285 #endif
11286 index = memory_address (CASE_VECTOR_MODE, index);
11287 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11288 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11289 convert_move (temp, vector, 0);
11291 emit_jump_insn (targetm.gen_tablejump (temp, table_label));
11293 /* If we are generating PIC code or if the table is PC-relative, the
11294 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11295 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11296 emit_barrier ();
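/* Editor's sketch: the dispatch emitted by do_tablejump, written as plain C
   (illustration only; `table' and `default_case' are hypothetical).  One
   unsigned comparison of the rebased index against RANGE rejects values
   below the minimum (they wrap around to huge unsigned numbers) as well as
   values above the maximum.  */

static void
tablejump_model (unsigned long index_minus_min, unsigned long range,
                 void (*const *table) (void), void (*default_case) (void))
{
  if (index_minus_min > range)
    default_case ();                /* out of range on either side */
  else
    table[index_minus_min] ();      /* scaled, indexed load from the table */
}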
11299 int
11300 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11301 rtx table_label, rtx default_label, int default_probability)
11303 rtx index;
11305 if (! targetm.have_tablejump ())
11306 return 0;
11308 index_expr = fold_build2 (MINUS_EXPR, index_type,
11309 fold_convert (index_type, index_expr),
11310 fold_convert (index_type, minval));
11311 index = expand_normal (index_expr);
11312 do_pending_stack_adjust ();
11314 do_tablejump (index, TYPE_MODE (index_type),
11315 convert_modes (TYPE_MODE (index_type),
11316 TYPE_MODE (TREE_TYPE (range)),
11317 expand_normal (range),
11318 TYPE_UNSIGNED (TREE_TYPE (range))),
11319 table_label, default_label, default_probability);
11320 return 1;
11323 /* Return a CONST_VECTOR rtx representing the vector mask for
11324 a VECTOR_CST of booleans. */
11325 static rtx
11326 const_vector_mask_from_tree (tree exp)
11328 rtvec v;
11329 unsigned i;
11330 int units;
11331 tree elt;
11332 machine_mode inner, mode;
11334 mode = TYPE_MODE (TREE_TYPE (exp));
11335 units = GET_MODE_NUNITS (mode);
11336 inner = GET_MODE_INNER (mode);
11338 v = rtvec_alloc (units);
11340 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11342 elt = VECTOR_CST_ELT (exp, i);
11344 gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11345 if (integer_zerop (elt))
11346 RTVEC_ELT (v, i) = CONST0_RTX (inner);
11347 else if (integer_onep (elt)
11348 || integer_minus_onep (elt))
11349 RTVEC_ELT (v, i) = CONSTM1_RTX (inner);
11350 else
11351 gcc_unreachable ();
11354 return gen_rtx_CONST_VECTOR (mode, v);
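/* Editor's note (illustrative): for a four-element boolean vector constant
   { 0, 1, 0, 1 } this builds the CONST_VECTOR { 0, -1, 0, -1 }, i.e. each
   true element becomes an all-ones element of the inner mode.  */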
11357 /* Return a CONST_INT rtx representing the scalar mask for
11358 a VECTOR_CST of booleans, one bit per element. */
11359 static rtx
11360 const_scalar_mask_from_tree (tree exp)
11362 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
11363 wide_int res = wi::zero (GET_MODE_PRECISION (mode));
11364 tree elt;
11365 unsigned i;
11367 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11369 elt = VECTOR_CST_ELT (exp, i);
11370 gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11371 if (integer_all_onesp (elt))
11372 res = wi::set_bit (res, i);
11373 else
11374 gcc_assert (integer_zerop (elt));
11377 return immed_wide_int_const (res, mode);
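/* Editor's sketch: the scalar-mask encoding above in plain C (illustration
   only, hypothetical helper), for targets whose vector masks live in an
   integer register.  Element I of the boolean vector sets bit I of the
   mask; at most 32 elements are assumed here.  */

static unsigned int
scalar_mask_model (const unsigned char *bools, unsigned int nelts)
{
  unsigned int mask = 0;

  for (unsigned int i = 0; i < nelts; i++)
    if (bools[i])        /* a true (all-ones) element */
      mask |= 1u << i;

  return mask;
}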
11380 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11381 static rtx
11382 const_vector_from_tree (tree exp)
11384 rtvec v;
11385 unsigned i;
11386 int units;
11387 tree elt;
11388 machine_mode inner, mode;
11390 mode = TYPE_MODE (TREE_TYPE (exp));
11392 if (initializer_zerop (exp))
11393 return CONST0_RTX (mode);
11395 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
11396 return const_vector_mask_from_tree (exp);
11398 units = GET_MODE_NUNITS (mode);
11399 inner = GET_MODE_INNER (mode);
11401 v = rtvec_alloc (units);
11403 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11405 elt = VECTOR_CST_ELT (exp, i);
11407 if (TREE_CODE (elt) == REAL_CST)
11408 RTVEC_ELT (v, i) = const_double_from_real_value (TREE_REAL_CST (elt),
11409 inner);
11410 else if (TREE_CODE (elt) == FIXED_CST)
11411 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11412 inner);
11413 else
11414 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11417 return gen_rtx_CONST_VECTOR (mode, v);
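/* Editor's note (illustrative): a two-element V2DF VECTOR_CST { 1.0, 2.0 }
   becomes a CONST_VECTOR of two DFmode CONST_DOUBLEs, while integer and
   fixed-point elements go through immed_wide_int_const and CONST_FIXED as
   above.  */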
11420 /* Build a decl for a personality function given a language prefix. */
11422 tree
11423 build_personality_function (const char *lang)
11425 const char *unwind_and_version;
11426 tree decl, type;
11427 char *name;
11429 switch (targetm_common.except_unwind_info (&global_options))
11431 case UI_NONE:
11432 return NULL;
11433 case UI_SJLJ:
11434 unwind_and_version = "_sj0";
11435 break;
11436 case UI_DWARF2:
11437 case UI_TARGET:
11438 unwind_and_version = "_v0";
11439 break;
11440 case UI_SEH:
11441 unwind_and_version = "_seh0";
11442 break;
11443 default:
11444 gcc_unreachable ();
11447 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11449 type = build_function_type_list (integer_type_node, integer_type_node,
11450 long_long_unsigned_type_node,
11451 ptr_type_node, ptr_type_node, NULL_TREE);
11452 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11453 get_identifier (name), type);
11454 DECL_ARTIFICIAL (decl) = 1;
11455 DECL_EXTERNAL (decl) = 1;
11456 TREE_PUBLIC (decl) = 1;
11458 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11459 are the flags assigned by targetm.encode_section_info. */
11460 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11462 return decl;
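/* Editor's note (illustrative): with the C++ prefix "gxx" the scheme above
   yields the familiar __gxx_personality_v0 for DWARF2/target unwinding,
   __gxx_personality_sj0 for SJLJ and __gxx_personality_seh0 for SEH.  */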
11465 /* Extracts the personality function of DECL and returns the corresponding
11466 libfunc. */
11468 rtx
11469 get_personality_function (tree decl)
11471 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11472 enum eh_personality_kind pk;
11474 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11475 if (pk == eh_personality_none)
11476 return NULL;
11478 if (!personality
11479 && pk == eh_personality_any)
11480 personality = lang_hooks.eh_personality ();
11482 if (pk == eh_personality_lang)
11483 gcc_assert (personality != NULL_TREE);
11485 return XEXP (DECL_RTL (personality), 0);
11488 /* Returns a tree for the size of EXP in bytes. */
11490 static tree
11491 tree_expr_size (const_tree exp)
11493 if (DECL_P (exp)
11494 && DECL_SIZE_UNIT (exp) != 0)
11495 return DECL_SIZE_UNIT (exp);
11496 else
11497 return size_in_bytes (TREE_TYPE (exp));
11500 /* Return an rtx for the size in bytes of the value of EXP. */
11502 rtx
11503 expr_size (tree exp)
11505 tree size;
11507 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11508 size = TREE_OPERAND (exp, 1);
11509 else
11511 size = tree_expr_size (exp);
11512 gcc_assert (size);
11513 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11516 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11519 /* Return a HOST_WIDE_INT for the size in bytes of the value of EXP, or -1
11520 if the size can vary or does not fit in a HOST_WIDE_INT. */
11522 static HOST_WIDE_INT
11523 int_expr_size (tree exp)
11525 tree size;
11527 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11528 size = TREE_OPERAND (exp, 1);
11529 else
11531 size = tree_expr_size (exp);
11532 gcc_assert (size);
11535 if (size == 0 || !tree_fits_shwi_p (size))
11536 return -1;
11538 return tree_to_shwi (size);
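/* Editor's note (illustrative): for a fixed-size object such as an int[10]
   array on a target with 32-bit int, int_expr_size returns 40; for a
   variable-length array, or any size that does not fit in a signed
   HOST_WIDE_INT, it returns -1.  */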